From 789c4cb94a4313e08bfdd9cb99ebf97bc534864f Mon Sep 17 00:00:00 2001 From: Cloud SDK Librarian Date: Wed, 18 Feb 2026 08:17:01 +0000 Subject: [PATCH] feat: generate libraries --- .librarian/state.yaml | 22 +- .../dataproc_v1/types/autoscaling_policies.py | 27 + .../cloud/dataproc_v1/types/clusters.py | 28 + .../test_autoscaling_policy_service.py | 50 + .../dataproc_v1/test_cluster_controller.py | 2 + .../test_workflow_template_service.py | 3 + .../dialogflowcx_v3beta1/types/session.py | 9 +- .../google/cloud/kms_inventory/__init__.py | 4 + .../google/cloud/kms_inventory_v1/__init__.py | 4 + .../key_tracking_service/async_client.py | 36 +- .../services/key_tracking_service/client.py | 47 +- .../key_tracking_service/transports/grpc.py | 14 +- .../transports/grpc_asyncio.py | 14 +- .../key_tracking_service/transports/rest.py | 4 +- .../transports/rest_base.py | 4 + .../cloud/kms_inventory_v1/types/__init__.py | 4 + .../types/key_tracking_service.py | 109 +- .../test_key_tracking_service.py | 64 +- .../google/cloud/kms/__init__.py | 16 + .../google/cloud/kms_v1/__init__.py | 16 + .../google/cloud/kms_v1/gapic_metadata.json | 60 + .../services/autokey_admin/async_client.py | 24 +- .../kms_v1/services/autokey_admin/client.py | 24 +- .../services/autokey_admin/transports/grpc.py | 20 +- .../autokey_admin/transports/grpc_asyncio.py | 20 +- .../services/autokey_admin/transports/rest.py | 16 +- .../autokey_admin/transports/rest_base.py | 9 + .../key_management_service/async_client.py | 547 +- .../services/key_management_service/client.py | 553 +- .../services/key_management_service/pagers.py | 156 + .../key_management_service/transports/base.py | 106 +- .../key_management_service/transports/grpc.py | 149 +- .../transports/grpc_asyncio.py | 216 +- .../key_management_service/transports/rest.py | 1216 +- .../transports/rest_base.py | 188 + .../google/cloud/kms_v1/types/__init__.py | 16 + .../cloud/kms_v1/types/autokey_admin.py | 55 +- 
.../google/cloud/kms_v1/types/resources.py | 79 + .../google/cloud/kms_v1/types/service.py | 161 + ...agement_service_delete_crypto_key_async.py | 57 + ...nagement_service_delete_crypto_key_sync.py | 57 + ...service_delete_crypto_key_version_async.py | 57 + ..._service_delete_crypto_key_version_sync.py | 57 + ...ment_service_get_retired_resource_async.py | 53 + ...ement_service_get_retired_resource_sync.py | 53 + ...service_import_crypto_key_version_async.py | 2 +- ..._service_import_crypto_key_version_sync.py | 2 +- ...nt_service_list_retired_resources_async.py | 54 + ...ent_service_list_retired_resources_sync.py | 54 + .../snippet_metadata_google.cloud.kms.v1.json | 778 +- .../unit/gapic/kms_v1/test_autokey_admin.py | 33 + .../kms_v1/test_key_management_service.py | 10436 ++++++++++------ .../types/common.py | 7 + .../cloud/storagebatchoperations/__init__.py | 8 + .../storagebatchoperations_v1/__init__.py | 8 + .../gapic_metadata.json | 30 + .../storage_batch_operations/async_client.py | 250 + .../storage_batch_operations/client.py | 262 + .../storage_batch_operations/pagers.py | 162 + .../transports/base.py | 34 + .../transports/grpc.py | 58 + .../transports/grpc_asyncio.py | 68 + .../transports/rest.py | 452 + .../transports/rest_base.py | 96 + .../types/__init__.py | 8 + .../types/storage_batch_operations.py | 106 + .../types/storage_batch_operations_types.py | 179 + ...oogle.cloud.storagebatchoperations.v1.json | 322 + ...h_operations_get_bucket_operation_async.py | 53 + ...ch_operations_get_bucket_operation_sync.py | 53 + ...operations_list_bucket_operations_async.py | 54 + ..._operations_list_bucket_operations_sync.py | 54 + .../test_storage_batch_operations.py | 2635 +++- .../maps/places_v1/types/content_block.py | 27 +- .../places_v1/types/contextual_content.py | 4 +- .../google/maps/places_v1/types/place.py | 62 + .../grafeas/grafeas_v1/types/common.py | 6 + .../grafeas/grafeas_v1/types/vulnerability.py | 8 + .../unit/gapic/grafeas_v1/test_grafeas.py | 4 
+ 79 files changed, 16489 insertions(+), 4306 deletions(-) create mode 100644 packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_delete_crypto_key_async.py create mode 100644 packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_delete_crypto_key_sync.py create mode 100644 packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_delete_crypto_key_version_async.py create mode 100644 packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_delete_crypto_key_version_sync.py create mode 100644 packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_get_retired_resource_async.py create mode 100644 packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_get_retired_resource_sync.py create mode 100644 packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_list_retired_resources_async.py create mode 100644 packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_list_retired_resources_sync.py create mode 100644 packages/google-cloud-storagebatchoperations/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_get_bucket_operation_async.py create mode 100644 packages/google-cloud-storagebatchoperations/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_get_bucket_operation_sync.py create mode 100644 packages/google-cloud-storagebatchoperations/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_list_bucket_operations_async.py create mode 100644 packages/google-cloud-storagebatchoperations/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_list_bucket_operations_sync.py diff --git a/.librarian/state.yaml 
b/.librarian/state.yaml index 9d6ef721db83..96cd049f679a 100644 --- a/.librarian/state.yaml +++ b/.librarian/state.yaml @@ -1309,7 +1309,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-dataproc version: 5.24.0 - last_generated_commit: 3322511885371d2b2253f209ccc3aa60d4100cfd + last_generated_commit: 1133adb136f742df62864f1d9d307df25d451880 apis: - path: google/cloud/dataproc/v1 service_config: dataproc_v1.yaml @@ -1416,7 +1416,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-dialogflow-cx version: 2.3.0 - last_generated_commit: 87e3579c041a50f985f8de71e9494c8fc85d520b + last_generated_commit: 1133adb136f742df62864f1d9d307df25d451880 apis: - path: google/cloud/dialogflow/cx/v3 service_config: dialogflow_v3.yaml @@ -1853,7 +1853,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-kms version: 3.10.0 - last_generated_commit: 87e3579c041a50f985f8de71e9494c8fc85d520b + last_generated_commit: 1133adb136f742df62864f1d9d307df25d451880 apis: - path: google/cloud/kms/v1 service_config: cloudkms_v1.yaml @@ -1868,7 +1868,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-kms-inventory version: 0.4.0 - last_generated_commit: 3322511885371d2b2253f209ccc3aa60d4100cfd + last_generated_commit: 1133adb136f742df62864f1d9d307df25d451880 apis: - path: google/cloud/kms/inventory/v1 service_config: kmsinventory_v1.yaml @@ -2155,7 +2155,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-network-connectivity version: 2.13.0 - last_generated_commit: 87e3579c041a50f985f8de71e9494c8fc85d520b + last_generated_commit: 1133adb136f742df62864f1d9d307df25d451880 apis: - path: google/cloud/networkconnectivity/v1 service_config: networkconnectivity_v1.yaml @@ -2622,7 +2622,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-saasplatform-saasservicemgmt version: 0.3.0 - last_generated_commit: 3322511885371d2b2253f209ccc3aa60d4100cfd + last_generated_commit: 1133adb136f742df62864f1d9d307df25d451880 apis: - path: 
google/cloud/saasplatform/saasservicemgmt/v1beta1 service_config: saasservicemgmt_v1beta1.yaml @@ -2767,7 +2767,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-service-management version: 1.15.0 - last_generated_commit: 3322511885371d2b2253f209ccc3aa60d4100cfd + last_generated_commit: 1133adb136f742df62864f1d9d307df25d451880 apis: - path: google/api/servicemanagement/v1 service_config: servicemanagement_v1.yaml @@ -2887,7 +2887,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-storagebatchoperations version: 0.3.0 - last_generated_commit: d4a34bf03d617723146fe3ae15192c4d93981a27 + last_generated_commit: 1133adb136f742df62864f1d9d307df25d451880 apis: - path: google/cloud/storagebatchoperations/v1 service_config: storagebatchoperations_v1.yaml @@ -3383,7 +3383,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-maps-places version: 0.6.0 - last_generated_commit: e8365a7f88fabe8717cb8322b8ce784b03b6daea + last_generated_commit: 1133adb136f742df62864f1d9d307df25d451880 apis: - path: google/maps/places/v1 service_config: places_v1.yaml @@ -3690,7 +3690,7 @@ libraries: tag_format: '{id}-v{version}' - id: googleapis-common-protos version: 1.72.0 - last_generated_commit: 9eea40c74d97622bb0aa406dd313409a376cc73b + last_generated_commit: 1133adb136f742df62864f1d9d307df25d451880 apis: - path: google/api service_config: serviceconfig.yaml @@ -3717,7 +3717,7 @@ libraries: tag_format: '{id}-v{version}' - id: grafeas version: 1.19.1 - last_generated_commit: 725496d32a359a40dd773995d3fda0342b440e15 + last_generated_commit: 1133adb136f742df62864f1d9d307df25d451880 apis: - path: grafeas/v1 service_config: grafeas_v1.yaml diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/autoscaling_policies.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/autoscaling_policies.py index 72977200d584..5f2dd6924c18 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/autoscaling_policies.py +++ 
b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/autoscaling_policies.py @@ -82,8 +82,30 @@ class AutoscalingPolicy(proto.Message): 63 characters, and must conform to `RFC 1035 `__. No more than 32 labels can be associated with an autoscaling policy. + cluster_type (google.cloud.dataproc_v1.types.AutoscalingPolicy.ClusterType): + Optional. The type of the clusters for which + this autoscaling policy is to be configured. """ + class ClusterType(proto.Enum): + r"""The type of the clusters for which this autoscaling policy is + to be configured. + + Values: + CLUSTER_TYPE_UNSPECIFIED (0): + Not set. + STANDARD (1): + Standard dataproc cluster with a minimum of + two primary workers. + ZERO_SCALE (2): + Clusters that can use only secondary workers + and be scaled down to zero secondary worker + nodes. + """ + CLUSTER_TYPE_UNSPECIFIED = 0 + STANDARD = 1 + ZERO_SCALE = 2 + id: str = proto.Field( proto.STRING, number=1, @@ -113,6 +135,11 @@ class AutoscalingPolicy(proto.Message): proto.STRING, number=6, ) + cluster_type: ClusterType = proto.Field( + proto.ENUM, + number=7, + enum=ClusterType, + ) class BasicAutoscalingAlgorithm(proto.Message): diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/clusters.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/clusters.py index 55e2f309e3c8..e57fcae7575c 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/clusters.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/clusters.py @@ -179,6 +179,8 @@ class ClusterConfig(proto.Message): r"""The cluster config. Attributes: + cluster_type (google.cloud.dataproc_v1.types.ClusterConfig.ClusterType): + Optional. The type of the cluster. cluster_tier (google.cloud.dataproc_v1.types.ClusterConfig.ClusterTier): Optional. The cluster tier. config_bucket (str): @@ -260,6 +262,27 @@ class ClusterConfig(proto.Message): Optional. The node group settings. 
""" + class ClusterType(proto.Enum): + r"""The type of the cluster. + + Values: + CLUSTER_TYPE_UNSPECIFIED (0): + Not set. + STANDARD (1): + Standard dataproc cluster with a minimum of + two primary workers. + SINGLE_NODE (2): + https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/single-node-clusters + ZERO_SCALE (3): + Clusters that can use only secondary workers + and be scaled down to zero secondary worker + nodes. + """ + CLUSTER_TYPE_UNSPECIFIED = 0 + STANDARD = 1 + SINGLE_NODE = 2 + ZERO_SCALE = 3 + class ClusterTier(proto.Enum): r"""The cluster tier. @@ -275,6 +298,11 @@ class ClusterTier(proto.Enum): CLUSTER_TIER_STANDARD = 1 CLUSTER_TIER_PREMIUM = 2 + cluster_type: ClusterType = proto.Field( + proto.ENUM, + number=27, + enum=ClusterType, + ) cluster_tier: ClusterTier = proto.Field( proto.ENUM, number=29, diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py index c9991a656cd2..44db27e750e4 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py @@ -1408,6 +1408,7 @@ def test_create_autoscaling_policy(request_type, transport: str = "grpc"): call.return_value = autoscaling_policies.AutoscalingPolicy( id="id_value", name="name_value", + cluster_type=autoscaling_policies.AutoscalingPolicy.ClusterType.STANDARD, ) response = client.create_autoscaling_policy(request) @@ -1421,6 +1422,10 @@ def test_create_autoscaling_policy(request_type, transport: str = "grpc"): assert isinstance(response, autoscaling_policies.AutoscalingPolicy) assert response.id == "id_value" assert response.name == "name_value" + assert ( + response.cluster_type + == autoscaling_policies.AutoscalingPolicy.ClusterType.STANDARD + ) def 
test_create_autoscaling_policy_non_empty_request_with_auto_populated_field(): @@ -1558,6 +1563,7 @@ async def test_create_autoscaling_policy_async( autoscaling_policies.AutoscalingPolicy( id="id_value", name="name_value", + cluster_type=autoscaling_policies.AutoscalingPolicy.ClusterType.STANDARD, ) ) response = await client.create_autoscaling_policy(request) @@ -1572,6 +1578,10 @@ async def test_create_autoscaling_policy_async( assert isinstance(response, autoscaling_policies.AutoscalingPolicy) assert response.id == "id_value" assert response.name == "name_value" + assert ( + response.cluster_type + == autoscaling_policies.AutoscalingPolicy.ClusterType.STANDARD + ) @pytest.mark.asyncio @@ -1765,6 +1775,7 @@ def test_update_autoscaling_policy(request_type, transport: str = "grpc"): call.return_value = autoscaling_policies.AutoscalingPolicy( id="id_value", name="name_value", + cluster_type=autoscaling_policies.AutoscalingPolicy.ClusterType.STANDARD, ) response = client.update_autoscaling_policy(request) @@ -1778,6 +1789,10 @@ def test_update_autoscaling_policy(request_type, transport: str = "grpc"): assert isinstance(response, autoscaling_policies.AutoscalingPolicy) assert response.id == "id_value" assert response.name == "name_value" + assert ( + response.cluster_type + == autoscaling_policies.AutoscalingPolicy.ClusterType.STANDARD + ) def test_update_autoscaling_policy_non_empty_request_with_auto_populated_field(): @@ -1911,6 +1926,7 @@ async def test_update_autoscaling_policy_async( autoscaling_policies.AutoscalingPolicy( id="id_value", name="name_value", + cluster_type=autoscaling_policies.AutoscalingPolicy.ClusterType.STANDARD, ) ) response = await client.update_autoscaling_policy(request) @@ -1925,6 +1941,10 @@ async def test_update_autoscaling_policy_async( assert isinstance(response, autoscaling_policies.AutoscalingPolicy) assert response.id == "id_value" assert response.name == "name_value" + assert ( + response.cluster_type + == 
autoscaling_policies.AutoscalingPolicy.ClusterType.STANDARD + ) @pytest.mark.asyncio @@ -2108,6 +2128,7 @@ def test_get_autoscaling_policy(request_type, transport: str = "grpc"): call.return_value = autoscaling_policies.AutoscalingPolicy( id="id_value", name="name_value", + cluster_type=autoscaling_policies.AutoscalingPolicy.ClusterType.STANDARD, ) response = client.get_autoscaling_policy(request) @@ -2121,6 +2142,10 @@ def test_get_autoscaling_policy(request_type, transport: str = "grpc"): assert isinstance(response, autoscaling_policies.AutoscalingPolicy) assert response.id == "id_value" assert response.name == "name_value" + assert ( + response.cluster_type + == autoscaling_policies.AutoscalingPolicy.ClusterType.STANDARD + ) def test_get_autoscaling_policy_non_empty_request_with_auto_populated_field(): @@ -2258,6 +2283,7 @@ async def test_get_autoscaling_policy_async( autoscaling_policies.AutoscalingPolicy( id="id_value", name="name_value", + cluster_type=autoscaling_policies.AutoscalingPolicy.ClusterType.STANDARD, ) ) response = await client.get_autoscaling_policy(request) @@ -2272,6 +2298,10 @@ async def test_get_autoscaling_policy_async( assert isinstance(response, autoscaling_policies.AutoscalingPolicy) assert response.id == "id_value" assert response.name == "name_value" + assert ( + response.cluster_type + == autoscaling_policies.AutoscalingPolicy.ClusterType.STANDARD + ) @pytest.mark.asyncio @@ -4577,6 +4607,7 @@ async def test_create_autoscaling_policy_empty_call_grpc_asyncio(): autoscaling_policies.AutoscalingPolicy( id="id_value", name="name_value", + cluster_type=autoscaling_policies.AutoscalingPolicy.ClusterType.STANDARD, ) ) await client.create_autoscaling_policy(request=None) @@ -4607,6 +4638,7 @@ async def test_update_autoscaling_policy_empty_call_grpc_asyncio(): autoscaling_policies.AutoscalingPolicy( id="id_value", name="name_value", + cluster_type=autoscaling_policies.AutoscalingPolicy.ClusterType.STANDARD, ) ) await 
client.update_autoscaling_policy(request=None) @@ -4637,6 +4669,7 @@ async def test_get_autoscaling_policy_empty_call_grpc_asyncio(): autoscaling_policies.AutoscalingPolicy( id="id_value", name="name_value", + cluster_type=autoscaling_policies.AutoscalingPolicy.ClusterType.STANDARD, ) ) await client.get_autoscaling_policy(request=None) @@ -4765,6 +4798,7 @@ def test_create_autoscaling_policy_rest_call_success(request_type): "worker_config": {"min_instances": 1387, "max_instances": 1389, "weight": 648}, "secondary_worker_config": {}, "labels": {}, + "cluster_type": 1, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -4843,6 +4877,7 @@ def get_message_fields(field): return_value = autoscaling_policies.AutoscalingPolicy( id="id_value", name="name_value", + cluster_type=autoscaling_policies.AutoscalingPolicy.ClusterType.STANDARD, ) # Wrap the value into a proper Response obj @@ -4861,6 +4896,10 @@ def get_message_fields(field): assert isinstance(response, autoscaling_policies.AutoscalingPolicy) assert response.id == "id_value" assert response.name == "name_value" + assert ( + response.cluster_type + == autoscaling_policies.AutoscalingPolicy.ClusterType.STANDARD + ) @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -4996,6 +5035,7 @@ def test_update_autoscaling_policy_rest_call_success(request_type): "worker_config": {"min_instances": 1387, "max_instances": 1389, "weight": 648}, "secondary_worker_config": {}, "labels": {}, + "cluster_type": 1, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -5074,6 +5114,7 @@ def get_message_fields(field): return_value = autoscaling_policies.AutoscalingPolicy( id="id_value", name="name_value", + cluster_type=autoscaling_policies.AutoscalingPolicy.ClusterType.STANDARD, ) # Wrap the value into a proper Response obj @@ -5092,6 +5133,10 @@ def get_message_fields(field): assert isinstance(response, autoscaling_policies.AutoscalingPolicy) assert response.id == "id_value" assert response.name == "name_value" + assert ( + response.cluster_type + == autoscaling_policies.AutoscalingPolicy.ClusterType.STANDARD + ) @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -5215,6 +5260,7 @@ def test_get_autoscaling_policy_rest_call_success(request_type): return_value = autoscaling_policies.AutoscalingPolicy( id="id_value", name="name_value", + cluster_type=autoscaling_policies.AutoscalingPolicy.ClusterType.STANDARD, ) # Wrap the value into a proper Response obj @@ -5233,6 +5279,10 @@ def test_get_autoscaling_policy_rest_call_success(request_type): assert isinstance(response, autoscaling_policies.AutoscalingPolicy) assert response.id == "id_value" assert response.name == "name_value" + assert ( + response.cluster_type + == autoscaling_policies.AutoscalingPolicy.ClusterType.STANDARD + ) @pytest.mark.parametrize("null_interceptor", [True, False]) diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py index 5faed7e0b19b..975e7e8d4c33 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py @@ -6362,6 +6362,7 @@ def test_create_cluster_rest_call_success(request_type): "project_id": "project_id_value", "cluster_name": "cluster_name_value", "config": { + "cluster_type": 1, "cluster_tier": 
1, "config_bucket": "config_bucket_value", "temp_bucket": "temp_bucket_value", @@ -6774,6 +6775,7 @@ def test_update_cluster_rest_call_success(request_type): "project_id": "project_id_value", "cluster_name": "cluster_name_value", "config": { + "cluster_type": 1, "cluster_tier": 1, "config_bucket": "config_bucket_value", "temp_bucket": "temp_bucket_value", diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py index 30d07bb953fc..2b0ae5a6ce8f 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py @@ -5987,6 +5987,7 @@ def test_create_workflow_template_rest_call_success(request_type): "managed_cluster": { "cluster_name": "cluster_name_value", "config": { + "cluster_type": 1, "cluster_tier": 1, "config_bucket": "config_bucket_value", "temp_bucket": "temp_bucket_value", @@ -6768,6 +6769,7 @@ def test_instantiate_inline_workflow_template_rest_call_success(request_type): "managed_cluster": { "cluster_name": "cluster_name_value", "config": { + "cluster_type": 1, "cluster_tier": 1, "config_bucket": "config_bucket_value", "temp_bucket": "temp_bucket_value", @@ -7275,6 +7277,7 @@ def test_update_workflow_template_rest_call_success(request_type): "managed_cluster": { "cluster_name": "cluster_name_value", "config": { + "cluster_type": 1, "cluster_tier": 1, "config_bucket": "config_bucket_value", "temp_bucket": "temp_bucket_value", diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/session.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/session.py index 9794b65edaef..b94323181a64 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/session.py +++ 
b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/session.py @@ -84,7 +84,8 @@ class DetectIntentResponseView(proto.Enum): Values: DETECT_INTENT_RESPONSE_VIEW_UNSPECIFIED (0): - Not specified. ``FULL`` will be used. + Not specified. ``DETECT_INTENT_RESPONSE_VIEW_DEFAULT`` will + be used. DETECT_INTENT_RESPONSE_VIEW_FULL (1): Full response view includes all fields. DETECT_INTENT_RESPONSE_VIEW_BASIC (2): @@ -95,10 +96,16 @@ class DetectIntentResponseView(proto.Enum): --------------------------------------------------------------------------------------------- [QueryResult.generative_info][google.cloud.dialogflow.cx.v3beta1.QueryResult.generative_info] + DETECT_INTENT_RESPONSE_VIEW_DEFAULT (3): + Default response view omits the following fields: + ------------------------------------------------- + + [QueryResult.trace_blocks][google.cloud.dialogflow.cx.v3beta1.QueryResult.trace_blocks] """ DETECT_INTENT_RESPONSE_VIEW_UNSPECIFIED = 0 DETECT_INTENT_RESPONSE_VIEW_FULL = 1 DETECT_INTENT_RESPONSE_VIEW_BASIC = 2 + DETECT_INTENT_RESPONSE_VIEW_DEFAULT = 3 class AnswerFeedback(proto.Message): diff --git a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory/__init__.py b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory/__init__.py index e3829b9bb43e..ae0f7e69a887 100644 --- a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory/__init__.py +++ b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory/__init__.py @@ -35,11 +35,13 @@ ListCryptoKeysResponse, ) from google.cloud.kms_inventory_v1.types.key_tracking_service import ( + FallbackScope, GetProtectedResourcesSummaryRequest, ProtectedResource, ProtectedResourcesSummary, SearchProtectedResourcesRequest, SearchProtectedResourcesResponse, + Warning, ) __all__ = ( @@ -54,4 +56,6 @@ "ProtectedResourcesSummary", "SearchProtectedResourcesRequest", "SearchProtectedResourcesResponse", + "Warning", + "FallbackScope", ) diff --git 
a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/__init__.py b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/__init__.py index f7f5cf214e12..443ca049eaa1 100644 --- a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/__init__.py +++ b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/__init__.py @@ -38,11 +38,13 @@ ) from .types.key_dashboard_service import ListCryptoKeysRequest, ListCryptoKeysResponse from .types.key_tracking_service import ( + FallbackScope, GetProtectedResourcesSummaryRequest, ProtectedResource, ProtectedResourcesSummary, SearchProtectedResourcesRequest, SearchProtectedResourcesResponse, + Warning, ) if hasattr(api_core, "check_python_version") and hasattr( @@ -142,6 +144,7 @@ def _get_version(dependency_name): __all__ = ( "KeyDashboardServiceAsyncClient", "KeyTrackingServiceAsyncClient", + "FallbackScope", "GetProtectedResourcesSummaryRequest", "KeyDashboardServiceClient", "KeyTrackingServiceClient", @@ -151,4 +154,5 @@ def _get_version(dependency_name): "ProtectedResourcesSummary", "SearchProtectedResourcesRequest", "SearchProtectedResourcesResponse", + "Warning", ) diff --git a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/async_client.py b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/async_client.py index 15da5e3918f7..22689f741ddf 100644 --- a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/async_client.py +++ b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/async_client.py @@ -83,6 +83,12 @@ class KeyTrackingServiceAsyncClient: parse_crypto_key_version_path = staticmethod( KeyTrackingServiceClient.parse_crypto_key_version_path ) + protected_resource_scope_path = staticmethod( + KeyTrackingServiceClient.protected_resource_scope_path + ) + parse_protected_resource_scope_path = 
staticmethod( + KeyTrackingServiceClient.parse_protected_resource_scope_path + ) protected_resources_summary_path = staticmethod( KeyTrackingServiceClient.protected_resources_summary_path ) @@ -318,9 +324,15 @@ async def get_protected_resources_summary( ) -> key_tracking_service.ProtectedResourcesSummary: r"""Returns aggregate information about the resources protected by the given Cloud KMS [CryptoKey][google.cloud.kms.v1.CryptoKey]. - Only resources within the same Cloud organization as the key - will be returned. The project that holds the key must be part of - an organization in order for this call to succeed. + By default, summary of resources within the same Cloud + organization as the key will be returned, which requires the KMS + organization service account to be configured(refer + https://docs.cloud.google.com/kms/docs/view-key-usage#required-roles). + If the KMS organization service account is not configured or + key's project is not part of an organization, set + [fallback_scope][google.cloud.kms.inventory.v1.GetProtectedResourcesSummaryRequest.fallback_scope] + to ``FALLBACK_SCOPE_PROJECT`` to retrieve a summary of protected + resources within the key's project. .. code-block:: python @@ -371,8 +383,8 @@ async def sample_get_protected_resources_summary(): google.cloud.kms_inventory_v1.types.ProtectedResourcesSummary: Aggregate information about the resources protected by a Cloud KMS key - in the same Cloud organization as the - key. + in the same Cloud organization/project + as the key. """ # Create or coerce a protobuf request object. @@ -440,7 +452,7 @@ async def search_protected_resources( ) -> pagers.SearchProtectedResourcesAsyncPager: r"""Returns metadata about the resources protected by the given Cloud KMS [CryptoKey][google.cloud.kms.v1.CryptoKey] in the - given Cloud organization. + given Cloud organization/project. .. code-block:: python @@ -475,8 +487,16 @@ async def sample_search_protected_resources(): The request object. 
Request message for [KeyTrackingService.SearchProtectedResources][google.cloud.kms.inventory.v1.KeyTrackingService.SearchProtectedResources]. scope (:class:`str`): - Required. Resource name of the - organization. Example: organizations/123 + Required. A scope can be an organization or a project. + Resources protected by the crypto key in provided scope + will be returned. + + The following values are allowed: + + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/12345678") + - projects/{PROJECT_ID} (e.g., "projects/foo-bar") + - projects/{PROJECT_NUMBER} (e.g., "projects/12345678") This corresponds to the ``scope`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/client.py b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/client.py index f6c5306efb89..8eb879cb2538 100644 --- a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/client.py +++ b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/client.py @@ -263,6 +263,23 @@ def parse_crypto_key_version_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def protected_resource_scope_path( + organization: str, + ) -> str: + """Returns a fully-qualified protected_resource_scope string.""" + return "organizations/{organization}/protectedResourceScope".format( + organization=organization, + ) + + @staticmethod + def parse_protected_resource_scope_path(path: str) -> Dict[str, str]: + """Parses a protected_resource_scope path into its component segments.""" + m = re.match( + r"^organizations/(?P.+?)/protectedResourceScope$", path + ) + return m.groupdict() if m else {} + @staticmethod def protected_resources_summary_path( project: str, @@ -787,9 +804,15 @@ def get_protected_resources_summary( ) -> 
key_tracking_service.ProtectedResourcesSummary: r"""Returns aggregate information about the resources protected by the given Cloud KMS [CryptoKey][google.cloud.kms.v1.CryptoKey]. - Only resources within the same Cloud organization as the key - will be returned. The project that holds the key must be part of - an organization in order for this call to succeed. + By default, summary of resources within the same Cloud + organization as the key will be returned, which requires the KMS + organization service account to be configured(refer + https://docs.cloud.google.com/kms/docs/view-key-usage#required-roles). + If the KMS organization service account is not configured or + key's project is not part of an organization, set + [fallback_scope][google.cloud.kms.inventory.v1.GetProtectedResourcesSummaryRequest.fallback_scope] + to ``FALLBACK_SCOPE_PROJECT`` to retrieve a summary of protected + resources within the key's project. .. code-block:: python @@ -840,8 +863,8 @@ def sample_get_protected_resources_summary(): google.cloud.kms_inventory_v1.types.ProtectedResourcesSummary: Aggregate information about the resources protected by a Cloud KMS key - in the same Cloud organization as the - key. + in the same Cloud organization/project + as the key. """ # Create or coerce a protobuf request object. @@ -908,7 +931,7 @@ def search_protected_resources( ) -> pagers.SearchProtectedResourcesPager: r"""Returns metadata about the resources protected by the given Cloud KMS [CryptoKey][google.cloud.kms.v1.CryptoKey] in the - given Cloud organization. + given Cloud organization/project. .. code-block:: python @@ -943,8 +966,16 @@ def sample_search_protected_resources(): The request object. Request message for [KeyTrackingService.SearchProtectedResources][google.cloud.kms.inventory.v1.KeyTrackingService.SearchProtectedResources]. scope (str): - Required. Resource name of the - organization. Example: organizations/123 + Required. A scope can be an organization or a project. 
+ Resources protected by the crypto key in provided scope + will be returned. + + The following values are allowed: + + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/12345678") + - projects/{PROJECT_ID} (e.g., "projects/foo-bar") + - projects/{PROJECT_NUMBER} (e.g., "projects/12345678") This corresponds to the ``scope`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/transports/grpc.py b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/transports/grpc.py index 07ad07f64131..7bc7acaca0fb 100644 --- a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/transports/grpc.py +++ b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/transports/grpc.py @@ -334,9 +334,15 @@ def get_protected_resources_summary( Returns aggregate information about the resources protected by the given Cloud KMS [CryptoKey][google.cloud.kms.v1.CryptoKey]. - Only resources within the same Cloud organization as the key - will be returned. The project that holds the key must be part of - an organization in order for this call to succeed. + By default, summary of resources within the same Cloud + organization as the key will be returned, which requires the KMS + organization service account to be configured(refer + https://docs.cloud.google.com/kms/docs/view-key-usage#required-roles). + If the KMS organization service account is not configured or + key's project is not part of an organization, set + [fallback_scope][google.cloud.kms.inventory.v1.GetProtectedResourcesSummaryRequest.fallback_scope] + to ``FALLBACK_SCOPE_PROJECT`` to retrieve a summary of protected + resources within the key's project. 
Returns: Callable[[~.GetProtectedResourcesSummaryRequest], @@ -369,7 +375,7 @@ def search_protected_resources( Returns metadata about the resources protected by the given Cloud KMS [CryptoKey][google.cloud.kms.v1.CryptoKey] in the - given Cloud organization. + given Cloud organization/project. Returns: Callable[[~.SearchProtectedResourcesRequest], diff --git a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/transports/grpc_asyncio.py b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/transports/grpc_asyncio.py index 11f264dd00c4..439c9da345c0 100644 --- a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/transports/grpc_asyncio.py @@ -342,9 +342,15 @@ def get_protected_resources_summary( Returns aggregate information about the resources protected by the given Cloud KMS [CryptoKey][google.cloud.kms.v1.CryptoKey]. - Only resources within the same Cloud organization as the key - will be returned. The project that holds the key must be part of - an organization in order for this call to succeed. + By default, summary of resources within the same Cloud + organization as the key will be returned, which requires the KMS + organization service account to be configured(refer + https://docs.cloud.google.com/kms/docs/view-key-usage#required-roles). + If the KMS organization service account is not configured or + key's project is not part of an organization, set + [fallback_scope][google.cloud.kms.inventory.v1.GetProtectedResourcesSummaryRequest.fallback_scope] + to ``FALLBACK_SCOPE_PROJECT`` to retrieve a summary of protected + resources within the key's project. 
Returns: Callable[[~.GetProtectedResourcesSummaryRequest], @@ -377,7 +383,7 @@ def search_protected_resources( Returns metadata about the resources protected by the given Cloud KMS [CryptoKey][google.cloud.kms.v1.CryptoKey] in the - given Cloud organization. + given Cloud organization/project. Returns: Callable[[~.SearchProtectedResourcesRequest], diff --git a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/transports/rest.py b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/transports/rest.py index ad9e0a8631e0..22b0431d78ab 100644 --- a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/transports/rest.py +++ b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/transports/rest.py @@ -343,8 +343,8 @@ def __call__( ~.key_tracking_service.ProtectedResourcesSummary: Aggregate information about the resources protected by a Cloud KMS key - in the same Cloud organization as the - key. + in the same Cloud organization/project + as the key. 
""" diff --git a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/transports/rest_base.py b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/transports/rest_base.py index acabf0843dfa..fdbb1d1f2e1e 100644 --- a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/transports/rest_base.py +++ b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/transports/rest_base.py @@ -159,6 +159,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{scope=organizations/*}/protectedResources:search", }, + { + "method": "get", + "uri": "/v1/{scope=projects/*}/protectedResources:search", + }, ] return http_options diff --git a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/types/__init__.py b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/types/__init__.py index 76f11b5eb4f3..af03ac8ea9a6 100644 --- a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/types/__init__.py +++ b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/types/__init__.py @@ -15,11 +15,13 @@ # from .key_dashboard_service import ListCryptoKeysRequest, ListCryptoKeysResponse from .key_tracking_service import ( + FallbackScope, GetProtectedResourcesSummaryRequest, ProtectedResource, ProtectedResourcesSummary, SearchProtectedResourcesRequest, SearchProtectedResourcesResponse, + Warning, ) __all__ = ( @@ -30,4 +32,6 @@ "ProtectedResourcesSummary", "SearchProtectedResourcesRequest", "SearchProtectedResourcesResponse", + "Warning", + "FallbackScope", ) diff --git a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/types/key_tracking_service.py b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/types/key_tracking_service.py index b77beb6dd56d..0d2da5165602 100644 --- 
a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/types/key_tracking_service.py +++ b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/types/key_tracking_service.py @@ -23,15 +23,33 @@ __protobuf__ = proto.module( package="google.cloud.kms.inventory.v1", manifest={ + "FallbackScope", "GetProtectedResourcesSummaryRequest", "ProtectedResourcesSummary", "SearchProtectedResourcesRequest", "SearchProtectedResourcesResponse", "ProtectedResource", + "Warning", }, ) +class FallbackScope(proto.Enum): + r"""Specifies the scope to use if the organization service agent + is not configured. + + Values: + FALLBACK_SCOPE_UNSPECIFIED (0): + Unspecified scope type. + FALLBACK_SCOPE_PROJECT (1): + If set to ``FALLBACK_SCOPE_PROJECT``, the API will fall back + to using key's project as request scope if the kms + organization service account is not configured. + """ + FALLBACK_SCOPE_UNSPECIFIED = 0 + FALLBACK_SCOPE_PROJECT = 1 + + class GetProtectedResourcesSummaryRequest(proto.Message): r"""Request message for [KeyTrackingService.GetProtectedResourcesSummary][google.cloud.kms.inventory.v1.KeyTrackingService.GetProtectedResourcesSummary]. @@ -40,17 +58,25 @@ class GetProtectedResourcesSummaryRequest(proto.Message): name (str): Required. The resource name of the [CryptoKey][google.cloud.kms.v1.CryptoKey]. + fallback_scope (google.cloud.kms_inventory_v1.types.FallbackScope): + Optional. The scope to use if the kms + organization service account is not configured. """ name: str = proto.Field( proto.STRING, number=1, ) + fallback_scope: "FallbackScope" = proto.Field( + proto.ENUM, + number=2, + enum="FallbackScope", + ) class ProtectedResourcesSummary(proto.Message): r"""Aggregate information about the resources protected by a - Cloud KMS key in the same Cloud organization as the key. + Cloud KMS key in the same Cloud organization/project as the key. 
Attributes: name (str): @@ -74,6 +100,12 @@ class ProtectedResourcesSummary(proto.Message): locations (MutableMapping[str, int]): The number of resources protected by the key grouped by region. + warnings (MutableSequence[google.cloud.kms_inventory_v1.types.Warning]): + Warning messages for the state of response + [ProtectedResourcesSummary][google.cloud.kms.inventory.v1.ProtectedResourcesSummary] + For example, if the organization service account is not + configured, INSUFFICIENT_PERMISSIONS_PARTIAL_DATA warning + will be returned. """ name: str = proto.Field( @@ -103,6 +135,11 @@ class ProtectedResourcesSummary(proto.Message): proto.INT64, number=4, ) + warnings: MutableSequence["Warning"] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message="Warning", + ) class SearchProtectedResourcesRequest(proto.Message): @@ -111,8 +148,16 @@ class SearchProtectedResourcesRequest(proto.Message): Attributes: scope (str): - Required. Resource name of the organization. - Example: organizations/123 + Required. A scope can be an organization or a project. + Resources protected by the crypto key in provided scope will + be returned. + + The following values are allowed: + + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/12345678") + - projects/{PROJECT_ID} (e.g., "projects/foo-bar") + - projects/{PROJECT_NUMBER} (e.g., "projects/12345678") crypto_key (str): Required. The resource name of the [CryptoKey][google.cloud.kms.v1.CryptoKey]. @@ -288,4 +333,62 @@ class ProtectedResource(proto.Message): ) +class Warning(proto.Message): + r"""A warning message that indicates potential problems with the + response data. + + Attributes: + warning_code (google.cloud.kms_inventory_v1.types.Warning.WarningCode): + The specific warning code for the displayed + message. + display_message (str): + The literal message providing context and + details about the warnings. + """ + + class WarningCode(proto.Enum): + r"""Different types of warnings that can be returned to the user. 
The + display_message contains detailed information regarding the + warning_code. + + Values: + WARNING_CODE_UNSPECIFIED (0): + Default value. This value is unused. + INSUFFICIENT_PERMISSIONS_PARTIAL_DATA (1): + Indicates that the caller or service agent lacks necessary + permissions to view some of the requested data. The response + may be partial. Example: + + - KMS organization service agent {service_agent_name} lacks + the ``cloudasset.assets.searchAllResources`` permission on + the scope. + RESOURCE_LIMIT_EXCEEDED_PARTIAL_DATA (2): + Indicates that a resource limit has been + exceeded, resulting in partial data. Example: + + - The project has more than 10,000 assets + (resources, crypto keys, key handles, IAM + policies, etc). + ORG_LESS_PROJECT_PARTIAL_DATA (3): + Indicates that the project exists outside of + an organization resource. Thus the analysis is + only done for the project level data and results + might be partial. + """ + WARNING_CODE_UNSPECIFIED = 0 + INSUFFICIENT_PERMISSIONS_PARTIAL_DATA = 1 + RESOURCE_LIMIT_EXCEEDED_PARTIAL_DATA = 2 + ORG_LESS_PROJECT_PARTIAL_DATA = 3 + + warning_code: WarningCode = proto.Field( + proto.ENUM, + number=1, + enum=WarningCode, + ) + display_message: str = proto.Field( + proto.STRING, + number=2, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-kms-inventory/tests/unit/gapic/kms_inventory_v1/test_key_tracking_service.py b/packages/google-cloud-kms-inventory/tests/unit/gapic/kms_inventory_v1/test_key_tracking_service.py index 751f2b3d2ab6..66e93ce658fc 100644 --- a/packages/google-cloud-kms-inventory/tests/unit/gapic/kms_inventory_v1/test_key_tracking_service.py +++ b/packages/google-cloud-kms-inventory/tests/unit/gapic/kms_inventory_v1/test_key_tracking_service.py @@ -2332,6 +2332,8 @@ def test_get_protected_resources_summary_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() 
).get_protected_resources_summary._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("fallback_scope",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -2390,7 +2392,7 @@ def test_get_protected_resources_summary_rest_unset_required_fields(): unset_fields = transport.get_protected_resources_summary._get_unset_required_fields( {} ) - assert set(unset_fields) == (set(()) & set(("name",))) + assert set(unset_fields) == (set(("fallbackScope",)) & set(("name",))) def test_get_protected_resources_summary_rest_flattened(): @@ -3786,11 +3788,31 @@ def test_parse_crypto_key_version_path(): assert expected == actual +def test_protected_resource_scope_path(): + organization = "scallop" + expected = "organizations/{organization}/protectedResourceScope".format( + organization=organization, + ) + actual = KeyTrackingServiceClient.protected_resource_scope_path(organization) + assert expected == actual + + +def test_parse_protected_resource_scope_path(): + expected = { + "organization": "abalone", + } + path = KeyTrackingServiceClient.protected_resource_scope_path(**expected) + + # Check that the path construction is reversible. 
+ actual = KeyTrackingServiceClient.parse_protected_resource_scope_path(path) + assert expected == actual + + def test_protected_resources_summary_path(): - project = "scallop" - location = "abalone" - key_ring = "squid" - crypto_key = "clam" + project = "squid" + location = "clam" + key_ring = "whelk" + crypto_key = "octopus" expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/protectedResourcesSummary".format( project=project, location=location, @@ -3805,10 +3827,10 @@ def test_protected_resources_summary_path(): def test_parse_protected_resources_summary_path(): expected = { - "project": "whelk", - "location": "octopus", - "key_ring": "oyster", - "crypto_key": "nudibranch", + "project": "oyster", + "location": "nudibranch", + "key_ring": "cuttlefish", + "crypto_key": "mussel", } path = KeyTrackingServiceClient.protected_resources_summary_path(**expected) @@ -3818,7 +3840,7 @@ def test_parse_protected_resources_summary_path(): def test_common_billing_account_path(): - billing_account = "cuttlefish" + billing_account = "winkle" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -3828,7 +3850,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "mussel", + "billing_account": "nautilus", } path = KeyTrackingServiceClient.common_billing_account_path(**expected) @@ -3838,7 +3860,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "winkle" + folder = "scallop" expected = "folders/{folder}".format( folder=folder, ) @@ -3848,7 +3870,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nautilus", + "folder": "abalone", } path = KeyTrackingServiceClient.common_folder_path(**expected) @@ -3858,7 +3880,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "scallop" + organization = "squid" 
expected = "organizations/{organization}".format( organization=organization, ) @@ -3868,7 +3890,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "abalone", + "organization": "clam", } path = KeyTrackingServiceClient.common_organization_path(**expected) @@ -3878,7 +3900,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "squid" + project = "whelk" expected = "projects/{project}".format( project=project, ) @@ -3888,7 +3910,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "clam", + "project": "octopus", } path = KeyTrackingServiceClient.common_project_path(**expected) @@ -3898,8 +3920,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "whelk" - location = "octopus" + project = "oyster" + location = "nudibranch" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -3910,8 +3932,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "oyster", - "location": "nudibranch", + "project": "cuttlefish", + "location": "mussel", } path = KeyTrackingServiceClient.common_location_path(**expected) diff --git a/packages/google-cloud-kms/google/cloud/kms/__init__.py b/packages/google-cloud-kms/google/cloud/kms/__init__.py index 922e16b10a52..6cd7372115fc 100644 --- a/packages/google-cloud-kms/google/cloud/kms/__init__.py +++ b/packages/google-cloud-kms/google/cloud/kms/__init__.py @@ -100,6 +100,7 @@ KeyRing, ProtectionLevel, PublicKey, + RetiredResource, ) from google.cloud.kms_v1.types.service import ( AsymmetricDecryptRequest, @@ -114,6 +115,10 @@ DecapsulateResponse, DecryptRequest, DecryptResponse, + DeleteCryptoKeyMetadata, + DeleteCryptoKeyRequest, + DeleteCryptoKeyVersionMetadata, + DeleteCryptoKeyVersionRequest, DestroyCryptoKeyVersionRequest, Digest, EncryptRequest, @@ -125,6 +130,7 @@ 
GetImportJobRequest, GetKeyRingRequest, GetPublicKeyRequest, + GetRetiredResourceRequest, ImportCryptoKeyVersionRequest, ListCryptoKeysRequest, ListCryptoKeysResponse, @@ -134,6 +140,8 @@ ListImportJobsResponse, ListKeyRingsRequest, ListKeyRingsResponse, + ListRetiredResourcesRequest, + ListRetiredResourcesResponse, LocationMetadata, MacSignRequest, MacSignResponse, @@ -213,6 +221,7 @@ "KeyOperationAttestation", "KeyRing", "PublicKey", + "RetiredResource", "AccessReason", "ProtectionLevel", "AsymmetricDecryptRequest", @@ -227,6 +236,10 @@ "DecapsulateResponse", "DecryptRequest", "DecryptResponse", + "DeleteCryptoKeyMetadata", + "DeleteCryptoKeyRequest", + "DeleteCryptoKeyVersionMetadata", + "DeleteCryptoKeyVersionRequest", "DestroyCryptoKeyVersionRequest", "Digest", "EncryptRequest", @@ -238,6 +251,7 @@ "GetImportJobRequest", "GetKeyRingRequest", "GetPublicKeyRequest", + "GetRetiredResourceRequest", "ImportCryptoKeyVersionRequest", "ListCryptoKeysRequest", "ListCryptoKeysResponse", @@ -247,6 +261,8 @@ "ListImportJobsResponse", "ListKeyRingsRequest", "ListKeyRingsResponse", + "ListRetiredResourcesRequest", + "ListRetiredResourcesResponse", "LocationMetadata", "MacSignRequest", "MacSignResponse", diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/__init__.py b/packages/google-cloud-kms/google/cloud/kms_v1/__init__.py index c0d6c2ba0736..9d12f69e43d6 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/__init__.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/__init__.py @@ -100,6 +100,7 @@ KeyRing, ProtectionLevel, PublicKey, + RetiredResource, ) from .types.service import ( AsymmetricDecryptRequest, @@ -114,6 +115,10 @@ DecapsulateResponse, DecryptRequest, DecryptResponse, + DeleteCryptoKeyMetadata, + DeleteCryptoKeyRequest, + DeleteCryptoKeyVersionMetadata, + DeleteCryptoKeyVersionRequest, DestroyCryptoKeyVersionRequest, Digest, EncryptRequest, @@ -125,6 +130,7 @@ GetImportJobRequest, GetKeyRingRequest, GetPublicKeyRequest, + 
GetRetiredResourceRequest, ImportCryptoKeyVersionRequest, ListCryptoKeysRequest, ListCryptoKeysResponse, @@ -134,6 +140,8 @@ ListImportJobsResponse, ListKeyRingsRequest, ListKeyRingsResponse, + ListRetiredResourcesRequest, + ListRetiredResourcesResponse, LocationMetadata, MacSignRequest, MacSignResponse, @@ -281,6 +289,10 @@ def _get_version(dependency_name): "DecapsulateResponse", "DecryptRequest", "DecryptResponse", + "DeleteCryptoKeyMetadata", + "DeleteCryptoKeyRequest", + "DeleteCryptoKeyVersionMetadata", + "DeleteCryptoKeyVersionRequest", "DeleteSingleTenantHsmInstanceProposalRequest", "DestroyCryptoKeyVersionRequest", "Digest", @@ -304,6 +316,7 @@ def _get_version(dependency_name): "GetKeyHandleRequest", "GetKeyRingRequest", "GetPublicKeyRequest", + "GetRetiredResourceRequest", "GetSingleTenantHsmInstanceProposalRequest", "GetSingleTenantHsmInstanceRequest", "HsmManagementClient", @@ -326,6 +339,8 @@ def _get_version(dependency_name): "ListKeyHandlesResponse", "ListKeyRingsRequest", "ListKeyRingsResponse", + "ListRetiredResourcesRequest", + "ListRetiredResourcesResponse", "ListSingleTenantHsmInstanceProposalsRequest", "ListSingleTenantHsmInstanceProposalsResponse", "ListSingleTenantHsmInstancesRequest", @@ -342,6 +357,7 @@ def _get_version(dependency_name): "RawEncryptRequest", "RawEncryptResponse", "RestoreCryptoKeyVersionRequest", + "RetiredResource", "ShowEffectiveAutokeyConfigRequest", "ShowEffectiveAutokeyConfigResponse", "SingleTenantHsmInstance", diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/gapic_metadata.json b/packages/google-cloud-kms/google/cloud/kms_v1/gapic_metadata.json index 2de614e2691f..df16c1e4c878 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/gapic_metadata.json +++ b/packages/google-cloud-kms/google/cloud/kms_v1/gapic_metadata.json @@ -456,6 +456,16 @@ "decrypt" ] }, + "DeleteCryptoKey": { + "methods": [ + "delete_crypto_key" + ] + }, + "DeleteCryptoKeyVersion": { + "methods": [ + "delete_crypto_key_version" + ] 
+ }, "DestroyCryptoKeyVersion": { "methods": [ "destroy_crypto_key_version" @@ -496,6 +506,11 @@ "get_public_key" ] }, + "GetRetiredResource": { + "methods": [ + "get_retired_resource" + ] + }, "ImportCryptoKeyVersion": { "methods": [ "import_crypto_key_version" @@ -521,6 +536,11 @@ "list_key_rings" ] }, + "ListRetiredResources": { + "methods": [ + "list_retired_resources" + ] + }, "MacSign": { "methods": [ "mac_sign" @@ -606,6 +626,16 @@ "decrypt" ] }, + "DeleteCryptoKey": { + "methods": [ + "delete_crypto_key" + ] + }, + "DeleteCryptoKeyVersion": { + "methods": [ + "delete_crypto_key_version" + ] + }, "DestroyCryptoKeyVersion": { "methods": [ "destroy_crypto_key_version" @@ -646,6 +676,11 @@ "get_public_key" ] }, + "GetRetiredResource": { + "methods": [ + "get_retired_resource" + ] + }, "ImportCryptoKeyVersion": { "methods": [ "import_crypto_key_version" @@ -671,6 +706,11 @@ "list_key_rings" ] }, + "ListRetiredResources": { + "methods": [ + "list_retired_resources" + ] + }, "MacSign": { "methods": [ "mac_sign" @@ -756,6 +796,16 @@ "decrypt" ] }, + "DeleteCryptoKey": { + "methods": [ + "delete_crypto_key" + ] + }, + "DeleteCryptoKeyVersion": { + "methods": [ + "delete_crypto_key_version" + ] + }, "DestroyCryptoKeyVersion": { "methods": [ "destroy_crypto_key_version" @@ -796,6 +846,11 @@ "get_public_key" ] }, + "GetRetiredResource": { + "methods": [ + "get_retired_resource" + ] + }, "ImportCryptoKeyVersion": { "methods": [ "import_crypto_key_version" @@ -821,6 +876,11 @@ "list_key_rings" ] }, + "ListRetiredResources": { + "methods": [ + "list_retired_resources" + ] + }, "MacSign": { "methods": [ "mac_sign" diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/async_client.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/async_client.py index 09f15d07c4c9..6d16a7052111 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/async_client.py +++ 
b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/async_client.py @@ -69,13 +69,15 @@ class AutokeyAdminAsyncClient: """Provides interfaces for managing `Cloud KMS Autokey `__ folder-level - configurations. A configuration is inherited by all descendent - projects. A configuration at one folder overrides any other - configurations in its ancestry. Setting a configuration on a folder - is a prerequisite for Cloud KMS Autokey, so that users working in a - descendant project can request provisioned - [CryptoKeys][google.cloud.kms.v1.CryptoKey], ready for Customer - Managed Encryption Key (CMEK) use, on-demand. + or project-level configurations. A configuration is inherited by all + descendent folders and projects. A configuration at a folder or + project overrides any other configurations in its ancestry. Setting + a configuration on a folder is a prerequisite for Cloud KMS Autokey, + so that users working in a descendant project can request + provisioned [CryptoKeys][google.cloud.kms.v1.CryptoKey], ready for + Customer Managed Encryption Key (CMEK) use, on-demand when using the + dedicated key project mode. This is not required when using the + delegated key management mode for same-project keys. """ _client: AutokeyAdminClient @@ -310,7 +312,7 @@ async def update_autokey_config( metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> autokey_admin.AutokeyConfig: r"""Updates the [AutokeyConfig][google.cloud.kms.v1.AutokeyConfig] - for a folder. The caller must have both + for a folder or a project. The caller must have both ``cloudkms.autokeyConfigs.update`` permission on the parent folder and ``cloudkms.cryptoKeys.setIamPolicy`` permission on the provided key project. A @@ -441,7 +443,7 @@ async def get_autokey_config( metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> autokey_admin.AutokeyConfig: r"""Returns the [AutokeyConfig][google.cloud.kms.v1.AutokeyConfig] - for a folder. + for a folder or project. .. 
code-block:: python @@ -476,8 +478,8 @@ async def sample_get_autokey_config(): name (:class:`str`): Required. Name of the [AutokeyConfig][google.cloud.kms.v1.AutokeyConfig] - resource, e.g. - ``folders/{FOLDER_NUMBER}/autokeyConfig``. + resource, e.g. ``folders/{FOLDER_NUMBER}/autokeyConfig`` + or ``projects/{PROJECT_NUMBER}/autokeyConfig``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/client.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/client.py index b94af893b3a7..e1efef5c9386 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/client.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/client.py @@ -113,13 +113,15 @@ def get_transport_class( class AutokeyAdminClient(metaclass=AutokeyAdminClientMeta): """Provides interfaces for managing `Cloud KMS Autokey `__ folder-level - configurations. A configuration is inherited by all descendent - projects. A configuration at one folder overrides any other - configurations in its ancestry. Setting a configuration on a folder - is a prerequisite for Cloud KMS Autokey, so that users working in a - descendant project can request provisioned - [CryptoKeys][google.cloud.kms.v1.CryptoKey], ready for Customer - Managed Encryption Key (CMEK) use, on-demand. + or project-level configurations. A configuration is inherited by all + descendent folders and projects. A configuration at a folder or + project overrides any other configurations in its ancestry. Setting + a configuration on a folder is a prerequisite for Cloud KMS Autokey, + so that users working in a descendant project can request + provisioned [CryptoKeys][google.cloud.kms.v1.CryptoKey], ready for + Customer Managed Encryption Key (CMEK) use, on-demand when using the + dedicated key project mode. 
This is not required when using the + delegated key management mode for same-project keys. """ @staticmethod @@ -741,7 +743,7 @@ def update_autokey_config( metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> autokey_admin.AutokeyConfig: r"""Updates the [AutokeyConfig][google.cloud.kms.v1.AutokeyConfig] - for a folder. The caller must have both + for a folder or a project. The caller must have both ``cloudkms.autokeyConfigs.update`` permission on the parent folder and ``cloudkms.cryptoKeys.setIamPolicy`` permission on the provided key project. A @@ -869,7 +871,7 @@ def get_autokey_config( metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> autokey_admin.AutokeyConfig: r"""Returns the [AutokeyConfig][google.cloud.kms.v1.AutokeyConfig] - for a folder. + for a folder or project. .. code-block:: python @@ -904,8 +906,8 @@ def sample_get_autokey_config(): name (str): Required. Name of the [AutokeyConfig][google.cloud.kms.v1.AutokeyConfig] - resource, e.g. - ``folders/{FOLDER_NUMBER}/autokeyConfig``. + resource, e.g. ``folders/{FOLDER_NUMBER}/autokeyConfig`` + or ``projects/{PROJECT_NUMBER}/autokeyConfig``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/grpc.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/grpc.py index 7c74a5d8b92f..f77e57918ed7 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/grpc.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/grpc.py @@ -116,13 +116,15 @@ class AutokeyAdminGrpcTransport(AutokeyAdminTransport): Provides interfaces for managing `Cloud KMS Autokey `__ folder-level - configurations. A configuration is inherited by all descendent - projects. A configuration at one folder overrides any other - configurations in its ancestry. 
Setting a configuration on a folder - is a prerequisite for Cloud KMS Autokey, so that users working in a - descendant project can request provisioned - [CryptoKeys][google.cloud.kms.v1.CryptoKey], ready for Customer - Managed Encryption Key (CMEK) use, on-demand. + or project-level configurations. A configuration is inherited by all + descendent folders and projects. A configuration at a folder or + project overrides any other configurations in its ancestry. Setting + a configuration on a folder is a prerequisite for Cloud KMS Autokey, + so that users working in a descendant project can request + provisioned [CryptoKeys][google.cloud.kms.v1.CryptoKey], ready for + Customer Managed Encryption Key (CMEK) use, on-demand when using the + dedicated key project mode. This is not required when using the + delegated key management mode for same-project keys. This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -342,7 +344,7 @@ def update_autokey_config( r"""Return a callable for the update autokey config method over gRPC. Updates the [AutokeyConfig][google.cloud.kms.v1.AutokeyConfig] - for a folder. The caller must have both + for a folder or a project. The caller must have both ``cloudkms.autokeyConfigs.update`` permission on the parent folder and ``cloudkms.cryptoKeys.setIamPolicy`` permission on the provided key project. A @@ -376,7 +378,7 @@ def get_autokey_config( r"""Return a callable for the get autokey config method over gRPC. Returns the [AutokeyConfig][google.cloud.kms.v1.AutokeyConfig] - for a folder. + for a folder or project. 
Returns: Callable[[~.GetAutokeyConfigRequest], diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/grpc_asyncio.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/grpc_asyncio.py index 5df7dd62d0e8..c78fe8a38398 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/grpc_asyncio.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/grpc_asyncio.py @@ -122,13 +122,15 @@ class AutokeyAdminGrpcAsyncIOTransport(AutokeyAdminTransport): Provides interfaces for managing `Cloud KMS Autokey `__ folder-level - configurations. A configuration is inherited by all descendent - projects. A configuration at one folder overrides any other - configurations in its ancestry. Setting a configuration on a folder - is a prerequisite for Cloud KMS Autokey, so that users working in a - descendant project can request provisioned - [CryptoKeys][google.cloud.kms.v1.CryptoKey], ready for Customer - Managed Encryption Key (CMEK) use, on-demand. + or project-level configurations. A configuration is inherited by all + descendent folders and projects. A configuration at a folder or + project overrides any other configurations in its ancestry. Setting + a configuration on a folder is a prerequisite for Cloud KMS Autokey, + so that users working in a descendant project can request + provisioned [CryptoKeys][google.cloud.kms.v1.CryptoKey], ready for + Customer Managed Encryption Key (CMEK) use, on-demand when using the + dedicated key project mode. This is not required when using the + delegated key management mode for same-project keys. This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -351,7 +353,7 @@ def update_autokey_config( r"""Return a callable for the update autokey config method over gRPC. 
Updates the [AutokeyConfig][google.cloud.kms.v1.AutokeyConfig] - for a folder. The caller must have both + for a folder or a project. The caller must have both ``cloudkms.autokeyConfigs.update`` permission on the parent folder and ``cloudkms.cryptoKeys.setIamPolicy`` permission on the provided key project. A @@ -387,7 +389,7 @@ def get_autokey_config( r"""Return a callable for the get autokey config method over gRPC. Returns the [AutokeyConfig][google.cloud.kms.v1.AutokeyConfig] - for a folder. + for a folder or project. Returns: Callable[[~.GetAutokeyConfigRequest], diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/rest.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/rest.py index d6e6b91f19f2..be6e9126afc9 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/rest.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/rest.py @@ -415,13 +415,15 @@ class AutokeyAdminRestTransport(_BaseAutokeyAdminRestTransport): Provides interfaces for managing `Cloud KMS Autokey `__ folder-level - configurations. A configuration is inherited by all descendent - projects. A configuration at one folder overrides any other - configurations in its ancestry. Setting a configuration on a folder - is a prerequisite for Cloud KMS Autokey, so that users working in a - descendant project can request provisioned - [CryptoKeys][google.cloud.kms.v1.CryptoKey], ready for Customer - Managed Encryption Key (CMEK) use, on-demand. + or project-level configurations. A configuration is inherited by all + descendent folders and projects. A configuration at a folder or + project overrides any other configurations in its ancestry. 
Setting + a configuration on a folder is a prerequisite for Cloud KMS Autokey, + so that users working in a descendant project can request + provisioned [CryptoKeys][google.cloud.kms.v1.CryptoKey], ready for + Customer Managed Encryption Key (CMEK) use, on-demand when using the + dedicated key project mode. This is not required when using the + delegated key management mode for same-project keys. This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/rest_base.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/rest_base.py index 7a05103d4187..2879f570237b 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/rest_base.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/rest_base.py @@ -112,6 +112,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{name=folders/*/autokeyConfig}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/autokeyConfig}", + }, ] return http_options @@ -209,6 +213,11 @@ def _get_http_options(): "uri": "/v1/{autokey_config.name=folders/*/autokeyConfig}", "body": "autokey_config", }, + { + "method": "patch", + "uri": "/v1/{autokey_config.name=projects/*/autokeyConfig}", + "body": "autokey_config", + }, ] return http_options diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/async_client.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/async_client.py index 49e2e7f61d3d..26ade2abf105 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/async_client.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/async_client.py @@ -44,11 +44,14 @@ except AttributeError: # pragma: NO COVER OptionalRetry = 
Union[retries.AsyncRetry, object, None] # type: ignore +import google.api_core.operation as operation # type: ignore +import google.api_core.operation_async as operation_async # type: ignore from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore import google.protobuf.duration_pb2 as duration_pb2 # type: ignore +import google.protobuf.empty_pb2 as empty_pb2 # type: ignore import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore import google.protobuf.wrappers_pb2 as wrappers_pb2 # type: ignore @@ -114,6 +117,12 @@ class KeyManagementServiceAsyncClient: parse_public_key_path = staticmethod( KeyManagementServiceClient.parse_public_key_path ) + retired_resource_path = staticmethod( + KeyManagementServiceClient.retired_resource_path + ) + parse_retired_resource_path = staticmethod( + KeyManagementServiceClient.parse_retired_resource_path + ) common_billing_account_path = staticmethod( KeyManagementServiceClient.common_billing_account_path ) @@ -839,6 +848,137 @@ async def sample_list_import_jobs(): # Done; return the response. return response + async def list_retired_resources( + self, + request: Optional[Union[service.ListRetiredResourcesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListRetiredResourcesAsyncPager: + r"""Lists the + [RetiredResources][google.cloud.kms.v1.RetiredResource] which + are the records of deleted + [CryptoKeys][google.cloud.kms.v1.CryptoKey]. RetiredResources + prevent the reuse of these resource names after deletion. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import kms_v1 + + async def sample_list_retired_resources(): + # Create a client + client = kms_v1.KeyManagementServiceAsyncClient() + + # Initialize request argument(s) + request = kms_v1.ListRetiredResourcesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_retired_resources(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.kms_v1.types.ListRetiredResourcesRequest, dict]]): + The request object. Request message for + [KeyManagementService.ListRetiredResources][google.cloud.kms.v1.KeyManagementService.ListRetiredResources]. + parent (:class:`str`): + Required. The project-specific location holding the + [RetiredResources][google.cloud.kms.v1.RetiredResource], + in the format ``projects/*/locations/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.cloud.kms_v1.services.key_management_service.pagers.ListRetiredResourcesAsyncPager: + Response message for + [KeyManagementService.ListRetiredResources][google.cloud.kms.v1.KeyManagementService.ListRetiredResources]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListRetiredResourcesRequest): + request = service.ListRetiredResourcesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_retired_resources + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListRetiredResourcesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def get_key_ring( self, request: Optional[Union[service.GetKeyRingRequest, dict]] = None, @@ -1476,6 +1616,125 @@ async def sample_get_import_job(): # Done; return the response. return response + async def get_retired_resource( + self, + request: Optional[Union[service.GetRetiredResourceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> resources.RetiredResource: + r"""Retrieves a specific + [RetiredResource][google.cloud.kms.v1.RetiredResource] resource, + which represents the record of a deleted + [CryptoKey][google.cloud.kms.v1.CryptoKey]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import kms_v1 + + async def sample_get_retired_resource(): + # Create a client + client = kms_v1.KeyManagementServiceAsyncClient() + + # Initialize request argument(s) + request = kms_v1.GetRetiredResourceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_retired_resource(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.kms_v1.types.GetRetiredResourceRequest, dict]]): + The request object. 
Request message for + [KeyManagementService.GetRetiredResource][google.cloud.kms.v1.KeyManagementService.GetRetiredResource]. + name (:class:`str`): + Required. The + [name][google.cloud.kms.v1.RetiredResource.name] of the + [RetiredResource][google.cloud.kms.v1.RetiredResource] + to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.kms_v1.types.RetiredResource: + A RetiredResource resource represents the record of a deleted + [CryptoKey][google.cloud.kms.v1.CryptoKey]. Its + purpose is to provide visibility into retained user + data and to prevent reuse of these names for new + [CryptoKeys][google.cloud.kms.v1.CryptoKey]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, service.GetRetiredResourceRequest): + request = service.GetRetiredResourceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_retired_resource + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def create_key_ring( self, request: Optional[Union[service.CreateKeyRingRequest, dict]] = None, @@ -1897,6 +2156,292 @@ async def sample_create_crypto_key_version(): # Done; return the response. return response + async def delete_crypto_key( + self, + request: Optional[Union[service.DeleteCryptoKeyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Permanently deletes the given + [CryptoKey][google.cloud.kms.v1.CryptoKey]. All child + [CryptoKeyVersions][google.cloud.kms.v1.CryptoKeyVersion] must + have been previously deleted using + [KeyManagementService.DeleteCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.DeleteCryptoKeyVersion]. + The specified crypto key will be immediately and permanently + deleted upon calling this method. This action cannot be undone. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import kms_v1 + + async def sample_delete_crypto_key(): + # Create a client + client = kms_v1.KeyManagementServiceAsyncClient() + + # Initialize request argument(s) + request = kms_v1.DeleteCryptoKeyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_crypto_key(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.kms_v1.types.DeleteCryptoKeyRequest, dict]]): + The request object. Request message for + [KeyManagementService.DeleteCryptoKey][google.cloud.kms.v1.KeyManagementService.DeleteCryptoKey]. + name (:class:`str`): + Required. The [name][google.cloud.kms.v1.CryptoKey.name] + of the [CryptoKey][google.cloud.kms.v1.CryptoKey] to + delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteCryptoKeyRequest): + request = service.DeleteCryptoKeyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_crypto_key + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.DeleteCryptoKeyMetadata, + ) + + # Done; return the response. + return response + + async def delete_crypto_key_version( + self, + request: Optional[Union[service.DeleteCryptoKeyVersionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Permanently deletes the given + [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion]. Only + possible if the version has not been previously imported and if + its [state][google.cloud.kms.v1.CryptoKeyVersion.state] is one + of [DESTROYED][CryptoKeyVersionState.DESTROYED], + [IMPORT_FAILED][CryptoKeyVersionState.IMPORT_FAILED], or + [GENERATION_FAILED][CryptoKeyVersionState.GENERATION_FAILED]. + Successfully imported + [CryptoKeyVersions][google.cloud.kms.v1.CryptoKeyVersion] cannot + be deleted at this time. The specified version will be + immediately and permanently deleted upon calling this method. + This action cannot be undone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import kms_v1 + + async def sample_delete_crypto_key_version(): + # Create a client + client = kms_v1.KeyManagementServiceAsyncClient() + + # Initialize request argument(s) + request = kms_v1.DeleteCryptoKeyVersionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_crypto_key_version(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.kms_v1.types.DeleteCryptoKeyVersionRequest, dict]]): + The request object. Request message for + [KeyManagementService.DeleteCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.DeleteCryptoKeyVersion]. + name (:class:`str`): + Required. The + [name][google.cloud.kms.v1.CryptoKeyVersion.name] of the + [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] + to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. 
A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteCryptoKeyVersionRequest): + request = service.DeleteCryptoKeyVersionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_crypto_key_version + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.DeleteCryptoKeyVersionMetadata, + ) + + # Done; return the response. 
+ return response + async def import_crypto_key_version( self, request: Optional[Union[service.ImportCryptoKeyVersionRequest, dict]] = None, @@ -1935,7 +2480,7 @@ async def sample_import_crypto_key_version(): request = kms_v1.ImportCryptoKeyVersionRequest( rsa_aes_wrapped_key=b'rsa_aes_wrapped_key_blob', parent="parent_value", - algorithm="PQ_SIGN_SLH_DSA_SHA2_128S", + algorithm="PQ_SIGN_ML_DSA_87_EXTERNAL_MU", import_job="import_job_value", ) diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/client.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/client.py index 9935bdabafcb..8fc9ef7573e7 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/client.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/client.py @@ -61,11 +61,14 @@ _LOGGER = std_logging.getLogger(__name__) +import google.api_core.operation as operation # type: ignore +import google.api_core.operation_async as operation_async # type: ignore from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore import google.protobuf.duration_pb2 as duration_pb2 # type: ignore +import google.protobuf.empty_pb2 as empty_pb2 # type: ignore import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore import google.protobuf.wrappers_pb2 as wrappers_pb2 # type: ignore @@ -367,6 +370,28 @@ def parse_public_key_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def retired_resource_path( + project: str, + location: str, + retired_resource: str, + ) -> str: + """Returns a fully-qualified retired_resource string.""" + return 
"projects/{project}/locations/{location}/retiredResources/{retired_resource}".format( + project=project, + location=location, + retired_resource=retired_resource, + ) + + @staticmethod + def parse_retired_resource_path(path: str) -> Dict[str, str]: + """Parses a retired_resource path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/retiredResources/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path( billing_account: str, @@ -1351,6 +1376,134 @@ def sample_list_import_jobs(): # Done; return the response. return response + def list_retired_resources( + self, + request: Optional[Union[service.ListRetiredResourcesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListRetiredResourcesPager: + r"""Lists the + [RetiredResources][google.cloud.kms.v1.RetiredResource] which + are the records of deleted + [CryptoKeys][google.cloud.kms.v1.CryptoKey]. RetiredResources + prevent the reuse of these resource names after deletion. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import kms_v1 + + def sample_list_retired_resources(): + # Create a client + client = kms_v1.KeyManagementServiceClient() + + # Initialize request argument(s) + request = kms_v1.ListRetiredResourcesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_retired_resources(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.kms_v1.types.ListRetiredResourcesRequest, dict]): + The request object. Request message for + [KeyManagementService.ListRetiredResources][google.cloud.kms.v1.KeyManagementService.ListRetiredResources]. + parent (str): + Required. The project-specific location holding the + [RetiredResources][google.cloud.kms.v1.RetiredResource], + in the format ``projects/*/locations/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.kms_v1.services.key_management_service.pagers.ListRetiredResourcesPager: + Response message for + [KeyManagementService.ListRetiredResources][google.cloud.kms.v1.KeyManagementService.ListRetiredResources]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListRetiredResourcesRequest): + request = service.ListRetiredResourcesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_retired_resources] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListRetiredResourcesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def get_key_ring( self, request: Optional[Union[service.GetKeyRingRequest, dict]] = None, @@ -1973,6 +2126,122 @@ def sample_get_import_job(): # Done; return the response. 
return response + def get_retired_resource( + self, + request: Optional[Union[service.GetRetiredResourceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> resources.RetiredResource: + r"""Retrieves a specific + [RetiredResource][google.cloud.kms.v1.RetiredResource] resource, + which represents the record of a deleted + [CryptoKey][google.cloud.kms.v1.CryptoKey]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import kms_v1 + + def sample_get_retired_resource(): + # Create a client + client = kms_v1.KeyManagementServiceClient() + + # Initialize request argument(s) + request = kms_v1.GetRetiredResourceRequest( + name="name_value", + ) + + # Make the request + response = client.get_retired_resource(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.kms_v1.types.GetRetiredResourceRequest, dict]): + The request object. Request message for + [KeyManagementService.GetRetiredResource][google.cloud.kms.v1.KeyManagementService.GetRetiredResource]. + name (str): + Required. The + [name][google.cloud.kms.v1.RetiredResource.name] of the + [RetiredResource][google.cloud.kms.v1.RetiredResource] + to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.kms_v1.types.RetiredResource: + A RetiredResource resource represents the record of a deleted + [CryptoKey][google.cloud.kms.v1.CryptoKey]. Its + purpose is to provide visibility into retained user + data and to prevent reuse of these names for new + [CryptoKeys][google.cloud.kms.v1.CryptoKey]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetRetiredResourceRequest): + request = service.GetRetiredResourceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_retired_resource] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def create_key_ring( self, request: Optional[Union[service.CreateKeyRingRequest, dict]] = None, @@ -2387,6 +2656,288 @@ def sample_create_crypto_key_version(): # Done; return the response. return response + def delete_crypto_key( + self, + request: Optional[Union[service.DeleteCryptoKeyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Permanently deletes the given + [CryptoKey][google.cloud.kms.v1.CryptoKey]. All child + [CryptoKeyVersions][google.cloud.kms.v1.CryptoKeyVersion] must + have been previously deleted using + [KeyManagementService.DeleteCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.DeleteCryptoKeyVersion]. + The specified crypto key will be immediately and permanently + deleted upon calling this method. This action cannot be undone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import kms_v1 + + def sample_delete_crypto_key(): + # Create a client + client = kms_v1.KeyManagementServiceClient() + + # Initialize request argument(s) + request = kms_v1.DeleteCryptoKeyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_crypto_key(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.kms_v1.types.DeleteCryptoKeyRequest, dict]): + The request object. Request message for + [KeyManagementService.DeleteCryptoKey][google.cloud.kms.v1.KeyManagementService.DeleteCryptoKey]. + name (str): + Required. The [name][google.cloud.kms.v1.CryptoKey.name] + of the [CryptoKey][google.cloud.kms.v1.CryptoKey] to + delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. 
For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteCryptoKeyRequest): + request = service.DeleteCryptoKeyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_crypto_key] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.DeleteCryptoKeyMetadata, + ) + + # Done; return the response. 
+ return response + + def delete_crypto_key_version( + self, + request: Optional[Union[service.DeleteCryptoKeyVersionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Permanently deletes the given + [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion]. Only + possible if the version has not been previously imported and if + its [state][google.cloud.kms.v1.CryptoKeyVersion.state] is one + of [DESTROYED][CryptoKeyVersionState.DESTROYED], + [IMPORT_FAILED][CryptoKeyVersionState.IMPORT_FAILED], or + [GENERATION_FAILED][CryptoKeyVersionState.GENERATION_FAILED]. + Successfully imported + [CryptoKeyVersions][google.cloud.kms.v1.CryptoKeyVersion] cannot + be deleted at this time. The specified version will be + immediately and permanently deleted upon calling this method. + This action cannot be undone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import kms_v1 + + def sample_delete_crypto_key_version(): + # Create a client + client = kms_v1.KeyManagementServiceClient() + + # Initialize request argument(s) + request = kms_v1.DeleteCryptoKeyVersionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_crypto_key_version(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.kms_v1.types.DeleteCryptoKeyVersionRequest, dict]): + The request object. Request message for + [KeyManagementService.DeleteCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.DeleteCryptoKeyVersion]. + name (str): + Required. The + [name][google.cloud.kms.v1.CryptoKeyVersion.name] of the + [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] + to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. 
For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteCryptoKeyVersionRequest): + request = service.DeleteCryptoKeyVersionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_crypto_key_version + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.DeleteCryptoKeyVersionMetadata, + ) + + # Done; return the response. 
+ return response + def import_crypto_key_version( self, request: Optional[Union[service.ImportCryptoKeyVersionRequest, dict]] = None, @@ -2425,7 +2976,7 @@ def sample_import_crypto_key_version(): request = kms_v1.ImportCryptoKeyVersionRequest( rsa_aes_wrapped_key=b'rsa_aes_wrapped_key_blob', parent="parent_value", - algorithm="PQ_SIGN_SLH_DSA_SHA2_128S", + algorithm="PQ_SIGN_ML_DSA_87_EXTERNAL_MU", import_job="import_job_value", ) diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/pagers.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/pagers.py index ca1df0009940..b929a40f457c 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/pagers.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/pagers.py @@ -663,3 +663,159 @@ async def async_generator(): def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListRetiredResourcesPager: + """A pager for iterating through ``list_retired_resources`` requests. + + This class thinly wraps an initial + :class:`google.cloud.kms_v1.types.ListRetiredResourcesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``retired_resources`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListRetiredResources`` requests and continue to iterate + through the ``retired_resources`` field on the + corresponding responses. + + All the usual :class:`google.cloud.kms_v1.types.ListRetiredResourcesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., service.ListRetiredResourcesResponse], + request: service.ListRetiredResourcesRequest, + response: service.ListRetiredResourcesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.kms_v1.types.ListRetiredResourcesRequest): + The initial request object. + response (google.cloud.kms_v1.types.ListRetiredResourcesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = service.ListRetiredResourcesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListRetiredResourcesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[resources.RetiredResource]: + for page in self.pages: + yield from page.retired_resources + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListRetiredResourcesAsyncPager: + """A pager for iterating through ``list_retired_resources`` requests. + + This class thinly wraps an initial + :class:`google.cloud.kms_v1.types.ListRetiredResourcesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``retired_resources`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListRetiredResources`` requests and continue to iterate + through the ``retired_resources`` field on the + corresponding responses. + + All the usual :class:`google.cloud.kms_v1.types.ListRetiredResourcesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListRetiredResourcesResponse]], + request: service.ListRetiredResourcesRequest, + response: service.ListRetiredResourcesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.kms_v1.types.ListRetiredResourcesRequest): + The initial request object. + response (google.cloud.kms_v1.types.ListRetiredResourcesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = service.ListRetiredResourcesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListRetiredResourcesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[resources.RetiredResource]: + async def async_generator(): + async for page in self.pages: + for response in page.retired_resources: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/transports/base.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/transports/base.py index a3b748efb138..0488271b2e29 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/transports/base.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/transports/base.py @@ -18,7 +18,7 @@ import google.api_core from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, operations_v1 from google.api_core import retry as retries import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore @@ -203,6 +203,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.list_retired_resources: gapic_v1.method.wrap_method( + self.list_retired_resources, + 
default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), self.get_key_ring: gapic_v1.method.wrap_method( self.get_key_ring, default_retry=retries.Retry( @@ -278,6 +293,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.get_retired_resource: gapic_v1.method.wrap_method( + self.get_retired_resource, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), self.create_key_ring: gapic_v1.method.wrap_method( self.create_key_ring, default_retry=retries.Retry( @@ -313,6 +343,36 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.delete_crypto_key: gapic_v1.method.wrap_method( + self.delete_crypto_key, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_crypto_key_version: gapic_v1.method.wrap_method( + self.delete_crypto_key_version, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), self.import_crypto_key_version: gapic_v1.method.wrap_method( self.import_crypto_key_version, default_timeout=60.0, @@ -569,6 +629,11 @@ def close(self): """ raise NotImplementedError() + @property + 
def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + @property def list_key_rings( self, @@ -612,6 +677,18 @@ def list_import_jobs( ]: raise NotImplementedError() + @property + def list_retired_resources( + self, + ) -> Callable[ + [service.ListRetiredResourcesRequest], + Union[ + service.ListRetiredResourcesResponse, + Awaitable[service.ListRetiredResourcesResponse], + ], + ]: + raise NotImplementedError() + @property def get_key_ring( self, @@ -657,6 +734,15 @@ def get_import_job( ]: raise NotImplementedError() + @property + def get_retired_resource( + self, + ) -> Callable[ + [service.GetRetiredResourceRequest], + Union[resources.RetiredResource, Awaitable[resources.RetiredResource]], + ]: + raise NotImplementedError() + @property def create_key_ring( self, @@ -684,6 +770,24 @@ def create_crypto_key_version( ]: raise NotImplementedError() + @property + def delete_crypto_key( + self, + ) -> Callable[ + [service.DeleteCryptoKeyRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_crypto_key_version( + self, + ) -> Callable[ + [service.DeleteCryptoKeyVersionRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def import_crypto_key_version( self, diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/transports/grpc.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/transports/grpc.py index 69e29d87f76d..85cee384741e 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/transports/grpc.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/transports/grpc.py @@ -19,7 +19,7 @@ from typing import Callable, Dict, Optional, Sequence, Tuple, Union import warnings -from google.api_core import 
gapic_v1, grpc_helpers +from google.api_core import gapic_v1, grpc_helpers, operations_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -209,6 +209,7 @@ def __init__( self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -336,6 +337,22 @@ def grpc_channel(self) -> grpc.Channel: """Return the channel designed to connect to this service.""" return self._grpc_channel + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + @property def list_key_rings( self, @@ -442,6 +459,38 @@ def list_import_jobs( ) return self._stubs["list_import_jobs"] + @property + def list_retired_resources( + self, + ) -> Callable[ + [service.ListRetiredResourcesRequest], service.ListRetiredResourcesResponse + ]: + r"""Return a callable for the list retired resources method over gRPC. + + Lists the + [RetiredResources][google.cloud.kms.v1.RetiredResource] which + are the records of deleted + [CryptoKeys][google.cloud.kms.v1.CryptoKey]. RetiredResources + prevent the reuse of these resource names after deletion. + + Returns: + Callable[[~.ListRetiredResourcesRequest], + ~.ListRetiredResourcesResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_retired_resources" not in self._stubs: + self._stubs["list_retired_resources"] = self._logged_channel.unary_unary( + "/google.cloud.kms.v1.KeyManagementService/ListRetiredResources", + request_serializer=service.ListRetiredResourcesRequest.serialize, + response_deserializer=service.ListRetiredResourcesResponse.deserialize, + ) + return self._stubs["list_retired_resources"] + @property def get_key_ring(self) -> Callable[[service.GetKeyRingRequest], resources.KeyRing]: r"""Return a callable for the get key ring method over gRPC. @@ -582,6 +631,35 @@ def get_import_job( ) return self._stubs["get_import_job"] + @property + def get_retired_resource( + self, + ) -> Callable[[service.GetRetiredResourceRequest], resources.RetiredResource]: + r"""Return a callable for the get retired resource method over gRPC. + + Retrieves a specific + [RetiredResource][google.cloud.kms.v1.RetiredResource] resource, + which represents the record of a deleted + [CryptoKey][google.cloud.kms.v1.CryptoKey]. + + Returns: + Callable[[~.GetRetiredResourceRequest], + ~.RetiredResource]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_retired_resource" not in self._stubs: + self._stubs["get_retired_resource"] = self._logged_channel.unary_unary( + "/google.cloud.kms.v1.KeyManagementService/GetRetiredResource", + request_serializer=service.GetRetiredResourceRequest.serialize, + response_deserializer=resources.RetiredResource.deserialize, + ) + return self._stubs["get_retired_resource"] + @property def create_key_ring( self, @@ -673,6 +751,75 @@ def create_crypto_key_version( ) return self._stubs["create_crypto_key_version"] + @property + def delete_crypto_key( + self, + ) -> Callable[[service.DeleteCryptoKeyRequest], operations_pb2.Operation]: + r"""Return a callable for the delete crypto key method over gRPC. + + Permanently deletes the given + [CryptoKey][google.cloud.kms.v1.CryptoKey]. All child + [CryptoKeyVersions][google.cloud.kms.v1.CryptoKeyVersion] must + have been previously deleted using + [KeyManagementService.DeleteCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.DeleteCryptoKeyVersion]. + The specified crypto key will be immediately and permanently + deleted upon calling this method. This action cannot be undone. + + Returns: + Callable[[~.DeleteCryptoKeyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_crypto_key" not in self._stubs: + self._stubs["delete_crypto_key"] = self._logged_channel.unary_unary( + "/google.cloud.kms.v1.KeyManagementService/DeleteCryptoKey", + request_serializer=service.DeleteCryptoKeyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_crypto_key"] + + @property + def delete_crypto_key_version( + self, + ) -> Callable[[service.DeleteCryptoKeyVersionRequest], operations_pb2.Operation]: + r"""Return a callable for the delete crypto key version method over gRPC. + + Permanently deletes the given + [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion]. Only + possible if the version has not been previously imported and if + its [state][google.cloud.kms.v1.CryptoKeyVersion.state] is one + of [DESTROYED][CryptoKeyVersionState.DESTROYED], + [IMPORT_FAILED][CryptoKeyVersionState.IMPORT_FAILED], or + [GENERATION_FAILED][CryptoKeyVersionState.GENERATION_FAILED]. + Successfully imported + [CryptoKeyVersions][google.cloud.kms.v1.CryptoKeyVersion] cannot + be deleted at this time. The specified version will be + immediately and permanently deleted upon calling this method. + This action cannot be undone. + + Returns: + Callable[[~.DeleteCryptoKeyVersionRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_crypto_key_version" not in self._stubs: + self._stubs["delete_crypto_key_version"] = self._logged_channel.unary_unary( + "/google.cloud.kms.v1.KeyManagementService/DeleteCryptoKeyVersion", + request_serializer=service.DeleteCryptoKeyVersionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_crypto_key_version"] + @property def import_crypto_key_version( self, diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/transports/grpc_asyncio.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/transports/grpc_asyncio.py index 2a0796c3ea7b..f2e8e1304aae 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/transports/grpc_asyncio.py @@ -21,7 +21,7 @@ import warnings from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -260,6 +260,7 @@ def __init__( self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -344,6 +345,22 @@ def grpc_channel(self) -> aio.Channel: # Return the channel from cache. return self._grpc_channel + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. 
+ + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + @property def list_key_rings( self, @@ -457,6 +474,39 @@ def list_import_jobs( ) return self._stubs["list_import_jobs"] + @property + def list_retired_resources( + self, + ) -> Callable[ + [service.ListRetiredResourcesRequest], + Awaitable[service.ListRetiredResourcesResponse], + ]: + r"""Return a callable for the list retired resources method over gRPC. + + Lists the + [RetiredResources][google.cloud.kms.v1.RetiredResource] which + are the records of deleted + [CryptoKeys][google.cloud.kms.v1.CryptoKey]. RetiredResources + prevent the reuse of these resource names after deletion. + + Returns: + Callable[[~.ListRetiredResourcesRequest], + Awaitable[~.ListRetiredResourcesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_retired_resources" not in self._stubs: + self._stubs["list_retired_resources"] = self._logged_channel.unary_unary( + "/google.cloud.kms.v1.KeyManagementService/ListRetiredResources", + request_serializer=service.ListRetiredResourcesRequest.serialize, + response_deserializer=service.ListRetiredResourcesResponse.deserialize, + ) + return self._stubs["list_retired_resources"] + @property def get_key_ring( self, @@ -601,6 +651,37 @@ def get_import_job( ) return self._stubs["get_import_job"] + @property + def get_retired_resource( + self, + ) -> Callable[ + [service.GetRetiredResourceRequest], Awaitable[resources.RetiredResource] + ]: + r"""Return a callable for the get retired resource method over gRPC. + + Retrieves a specific + [RetiredResource][google.cloud.kms.v1.RetiredResource] resource, + which represents the record of a deleted + [CryptoKey][google.cloud.kms.v1.CryptoKey]. + + Returns: + Callable[[~.GetRetiredResourceRequest], + Awaitable[~.RetiredResource]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_retired_resource" not in self._stubs: + self._stubs["get_retired_resource"] = self._logged_channel.unary_unary( + "/google.cloud.kms.v1.KeyManagementService/GetRetiredResource", + request_serializer=service.GetRetiredResourceRequest.serialize, + response_deserializer=resources.RetiredResource.deserialize, + ) + return self._stubs["get_retired_resource"] + @property def create_key_ring( self, @@ -694,6 +775,79 @@ def create_crypto_key_version( ) return self._stubs["create_crypto_key_version"] + @property + def delete_crypto_key( + self, + ) -> Callable[ + [service.DeleteCryptoKeyRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete crypto key method over gRPC. 
+ + Permanently deletes the given + [CryptoKey][google.cloud.kms.v1.CryptoKey]. All child + [CryptoKeyVersions][google.cloud.kms.v1.CryptoKeyVersion] must + have been previously deleted using + [KeyManagementService.DeleteCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.DeleteCryptoKeyVersion]. + The specified crypto key will be immediately and permanently + deleted upon calling this method. This action cannot be undone. + + Returns: + Callable[[~.DeleteCryptoKeyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_crypto_key" not in self._stubs: + self._stubs["delete_crypto_key"] = self._logged_channel.unary_unary( + "/google.cloud.kms.v1.KeyManagementService/DeleteCryptoKey", + request_serializer=service.DeleteCryptoKeyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_crypto_key"] + + @property + def delete_crypto_key_version( + self, + ) -> Callable[ + [service.DeleteCryptoKeyVersionRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete crypto key version method over gRPC. + + Permanently deletes the given + [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion]. Only + possible if the version has not been previously imported and if + its [state][google.cloud.kms.v1.CryptoKeyVersion.state] is one + of [DESTROYED][CryptoKeyVersionState.DESTROYED], + [IMPORT_FAILED][CryptoKeyVersionState.IMPORT_FAILED], or + [GENERATION_FAILED][CryptoKeyVersionState.GENERATION_FAILED]. + Successfully imported + [CryptoKeyVersions][google.cloud.kms.v1.CryptoKeyVersion] cannot + be deleted at this time. The specified version will be + immediately and permanently deleted upon calling this method. 
+ This action cannot be undone. + + Returns: + Callable[[~.DeleteCryptoKeyVersionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_crypto_key_version" not in self._stubs: + self._stubs["delete_crypto_key_version"] = self._logged_channel.unary_unary( + "/google.cloud.kms.v1.KeyManagementService/DeleteCryptoKeyVersion", + request_serializer=service.DeleteCryptoKeyVersionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_crypto_key_version"] + @property def import_crypto_key_version( self, @@ -1332,6 +1486,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.list_retired_resources: self._wrap_method( + self.list_retired_resources, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), self.get_key_ring: self._wrap_method( self.get_key_ring, default_retry=retries.AsyncRetry( @@ -1407,6 +1576,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.get_retired_resource: self._wrap_method( + self.get_retired_resource, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), self.create_key_ring: self._wrap_method( self.create_key_ring, default_retry=retries.AsyncRetry( @@ -1442,6 +1626,36 @@ 
def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.delete_crypto_key: self._wrap_method( + self.delete_crypto_key, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_crypto_key_version: self._wrap_method( + self.delete_crypto_key_version, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), self.import_crypto_key_version: self._wrap_method( self.import_crypto_key_version, default_timeout=60.0, diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/transports/rest.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/transports/rest.py index 7aa860c37cd2..dda358017ad1 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/transports/rest.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/transports/rest.py @@ -19,8 +19,8 @@ from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings +from google.api_core import gapic_v1, operations_v1, rest_helpers, rest_streaming from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1, rest_helpers, rest_streaming from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore @@ -140,6 +140,22 @@ def post_decrypt(self, response): logging.log(f"Received response: {response}") return 
response + def pre_delete_crypto_key(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_crypto_key(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_crypto_key_version(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_crypto_key_version(self, response): + logging.log(f"Received response: {response}") + return response + def pre_destroy_crypto_key_version(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -204,6 +220,14 @@ def post_get_public_key(self, response): logging.log(f"Received response: {response}") return response + def pre_get_retired_resource(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_retired_resource(self, response): + logging.log(f"Received response: {response}") + return response + def pre_import_crypto_key_version(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -244,6 +268,14 @@ def post_list_key_rings(self, response): logging.log(f"Received response: {response}") return response + def pre_list_retired_resources(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_retired_resources(self, response): + logging.log(f"Received response: {response}") + return response + def pre_mac_sign(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -686,6 +718,100 @@ def post_decrypt_with_metadata( """ return response, metadata + def pre_delete_crypto_key( + self, + request: service.DeleteCryptoKeyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.DeleteCryptoKeyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_crypto_key + + Override in 
a subclass to manipulate the request or metadata + before they are sent to the KeyManagementService server. + """ + return request, metadata + + def post_delete_crypto_key( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_crypto_key + + DEPRECATED. Please use the `post_delete_crypto_key_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the KeyManagementService server but before + it is returned to user code. This `post_delete_crypto_key` interceptor runs + before the `post_delete_crypto_key_with_metadata` interceptor. + """ + return response + + def post_delete_crypto_key_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_crypto_key + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyManagementService server but before it is returned to user code. + + We recommend only using this `post_delete_crypto_key_with_metadata` + interceptor in new development instead of the `post_delete_crypto_key` interceptor. + When both interceptors are used, this `post_delete_crypto_key_with_metadata` interceptor runs after the + `post_delete_crypto_key` interceptor. The (possibly modified) response returned by + `post_delete_crypto_key` will be passed to + `post_delete_crypto_key_with_metadata`. 
+ """ + return response, metadata + + def pre_delete_crypto_key_version( + self, + request: service.DeleteCryptoKeyVersionRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.DeleteCryptoKeyVersionRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete_crypto_key_version + + Override in a subclass to manipulate the request or metadata + before they are sent to the KeyManagementService server. + """ + return request, metadata + + def post_delete_crypto_key_version( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_crypto_key_version + + DEPRECATED. Please use the `post_delete_crypto_key_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the KeyManagementService server but before + it is returned to user code. This `post_delete_crypto_key_version` interceptor runs + before the `post_delete_crypto_key_version_with_metadata` interceptor. + """ + return response + + def post_delete_crypto_key_version_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_crypto_key_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyManagementService server but before it is returned to user code. + + We recommend only using this `post_delete_crypto_key_version_with_metadata` + interceptor in new development instead of the `post_delete_crypto_key_version` interceptor. + When both interceptors are used, this `post_delete_crypto_key_version_with_metadata` interceptor runs after the + `post_delete_crypto_key_version` interceptor. 
The (possibly modified) response returned by + `post_delete_crypto_key_version` will be passed to + `post_delete_crypto_key_version_with_metadata`. + """ + return response, metadata + def pre_destroy_crypto_key_version( self, request: service.DestroyCryptoKeyVersionRequest, @@ -1054,6 +1180,54 @@ def post_get_public_key_with_metadata( """ return response, metadata + def pre_get_retired_resource( + self, + request: service.GetRetiredResourceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.GetRetiredResourceRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_retired_resource + + Override in a subclass to manipulate the request or metadata + before they are sent to the KeyManagementService server. + """ + return request, metadata + + def post_get_retired_resource( + self, response: resources.RetiredResource + ) -> resources.RetiredResource: + """Post-rpc interceptor for get_retired_resource + + DEPRECATED. Please use the `post_get_retired_resource_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the KeyManagementService server but before + it is returned to user code. This `post_get_retired_resource` interceptor runs + before the `post_get_retired_resource_with_metadata` interceptor. + """ + return response + + def post_get_retired_resource_with_metadata( + self, + response: resources.RetiredResource, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.RetiredResource, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_retired_resource + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyManagementService server but before it is returned to user code. + + We recommend only using this `post_get_retired_resource_with_metadata` + interceptor in new development instead of the `post_get_retired_resource` interceptor. 
+ When both interceptors are used, this `post_get_retired_resource_with_metadata` interceptor runs after the + `post_get_retired_resource` interceptor. The (possibly modified) response returned by + `post_get_retired_resource` will be passed to + `post_get_retired_resource_with_metadata`. + """ + return response, metadata + def pre_import_crypto_key_version( self, request: service.ImportCryptoKeyVersionRequest, @@ -1290,6 +1464,56 @@ def post_list_key_rings_with_metadata( """ return response, metadata + def pre_list_retired_resources( + self, + request: service.ListRetiredResourcesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.ListRetiredResourcesRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_retired_resources + + Override in a subclass to manipulate the request or metadata + before they are sent to the KeyManagementService server. + """ + return request, metadata + + def post_list_retired_resources( + self, response: service.ListRetiredResourcesResponse + ) -> service.ListRetiredResourcesResponse: + """Post-rpc interceptor for list_retired_resources + + DEPRECATED. Please use the `post_list_retired_resources_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the KeyManagementService server but before + it is returned to user code. This `post_list_retired_resources` interceptor runs + before the `post_list_retired_resources_with_metadata` interceptor. 
+ """ + return response + + def post_list_retired_resources_with_metadata( + self, + response: service.ListRetiredResourcesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.ListRetiredResourcesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_retired_resources + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyManagementService server but before it is returned to user code. + + We recommend only using this `post_list_retired_resources_with_metadata` + interceptor in new development instead of the `post_list_retired_resources` interceptor. + When both interceptors are used, this `post_list_retired_resources_with_metadata` interceptor runs after the + `post_list_retired_resources` interceptor. The (possibly modified) response returned by + `post_list_retired_resources` will be passed to + `post_list_retired_resources_with_metadata`. + """ + return response, metadata + def pre_mac_sign( self, request: service.MacSignRequest, @@ -1906,11 +2130,46 @@ def __init__( self._session = AuthorizedSession( self._credentials, default_host=self.DEFAULT_HOST ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) self._interceptor = interceptor or KeyManagementServiceRestInterceptor() self._prep_wrapped_messages(client_info) + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + class _AsymmetricDecrypt( _BaseKeyManagementServiceRestTransport._BaseAsymmetricDecrypt, KeyManagementServiceRestStub, @@ -3221,12 +3480,12 @@ def __call__( ) return resp - class _DestroyCryptoKeyVersion( - _BaseKeyManagementServiceRestTransport._BaseDestroyCryptoKeyVersion, + class _DeleteCryptoKey( + _BaseKeyManagementServiceRestTransport._BaseDeleteCryptoKey, KeyManagementServiceRestStub, ): def __hash__(self): - return hash("KeyManagementServiceRestTransport.DestroyCryptoKeyVersion") + return hash("KeyManagementServiceRestTransport.DeleteCryptoKey") @staticmethod def _get_response( @@ -3247,70 +3506,52 @@ def _get_response( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, ) return response def __call__( self, - request: service.DestroyCryptoKeyVersionRequest, + request: service.DeleteCryptoKeyRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> resources.CryptoKeyVersion: - r"""Call the destroy crypto key - version method over HTTP. - - Args: - request (~.service.DestroyCryptoKeyVersionRequest): - The request object. Request message for - [KeyManagementService.DestroyCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.DestroyCryptoKeyVersion]. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.resources.CryptoKeyVersion: - A - [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] - represents an individual cryptographic key, and the - associated key material. + ) -> operations_pb2.Operation: + r"""Call the delete crypto key method over HTTP. - An - [ENABLED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.ENABLED] - version can be used for cryptographic operations. + Args: + request (~.service.DeleteCryptoKeyRequest): + The request object. Request message for + [KeyManagementService.DeleteCryptoKey][google.cloud.kms.v1.KeyManagementService.DeleteCryptoKey]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. - For security reasons, the raw cryptographic key material - represented by a - [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] - can never be viewed or exported. It can only be used to - encrypt, decrypt, or sign data when an authorized user - or application invokes Cloud KMS. + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" http_options = ( - _BaseKeyManagementServiceRestTransport._BaseDestroyCryptoKeyVersion._get_http_options() + _BaseKeyManagementServiceRestTransport._BaseDeleteCryptoKey._get_http_options() ) - request, metadata = self._interceptor.pre_destroy_crypto_key_version( + request, metadata = self._interceptor.pre_delete_crypto_key( request, metadata ) - transcoded_request = _BaseKeyManagementServiceRestTransport._BaseDestroyCryptoKeyVersion._get_transcoded_request( + transcoded_request = _BaseKeyManagementServiceRestTransport._BaseDeleteCryptoKey._get_transcoded_request( http_options, request ) - body = _BaseKeyManagementServiceRestTransport._BaseDestroyCryptoKeyVersion._get_request_body_json( - transcoded_request - ) - # Jsonify the query params - query_params = _BaseKeyManagementServiceRestTransport._BaseDestroyCryptoKeyVersion._get_query_params_json( + query_params = _BaseKeyManagementServiceRestTransport._BaseDeleteCryptoKey._get_query_params_json( transcoded_request ) @@ -3332,24 +3573,23 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.kms_v1.KeyManagementServiceClient.DestroyCryptoKeyVersion", + f"Sending request for google.cloud.kms_v1.KeyManagementServiceClient.DeleteCryptoKey", extra={ "serviceName": "google.cloud.kms.v1.KeyManagementService", - "rpcName": "DestroyCryptoKeyVersion", + "rpcName": "DeleteCryptoKey", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = KeyManagementServiceRestTransport._DestroyCryptoKeyVersion._get_response( + response = KeyManagementServiceRestTransport._DeleteCryptoKey._get_response( self._host, metadata, query_params, self._session, timeout, transcoded_request, - body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -3358,21 +3598,19 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = resources.CryptoKeyVersion() - pb_resp = 
resources.CryptoKeyVersion.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_destroy_crypto_key_version(resp) + resp = self._interceptor.post_delete_crypto_key(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_destroy_crypto_key_version_with_metadata( + resp, _ = self._interceptor.post_delete_crypto_key_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = resources.CryptoKeyVersion.to_json(response) + response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { @@ -3381,22 +3619,22 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.kms_v1.KeyManagementServiceClient.destroy_crypto_key_version", + "Received response for google.cloud.kms_v1.KeyManagementServiceClient.delete_crypto_key", extra={ "serviceName": "google.cloud.kms.v1.KeyManagementService", - "rpcName": "DestroyCryptoKeyVersion", + "rpcName": "DeleteCryptoKey", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _Encrypt( - _BaseKeyManagementServiceRestTransport._BaseEncrypt, + class _DeleteCryptoKeyVersion( + _BaseKeyManagementServiceRestTransport._BaseDeleteCryptoKeyVersion, KeyManagementServiceRestStub, ): def __hash__(self): - return hash("KeyManagementServiceRestTransport.Encrypt") + return hash("KeyManagementServiceRestTransport.DeleteCryptoKeyVersion") @staticmethod def _get_response( @@ -3417,24 +3655,23 @@ def _get_response( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, ) return response def __call__( self, - request: service.EncryptRequest, + request: 
service.DeleteCryptoKeyVersionRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> service.EncryptResponse: - r"""Call the encrypt method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the delete crypto key version method over HTTP. Args: - request (~.service.EncryptRequest): + request (~.service.DeleteCryptoKeyVersionRequest): The request object. Request message for - [KeyManagementService.Encrypt][google.cloud.kms.v1.KeyManagementService.Encrypt]. + [KeyManagementService.DeleteCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.DeleteCryptoKeyVersion]. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3444,27 +3681,26 @@ def __call__( be of type `bytes`. Returns: - ~.service.EncryptResponse: - Response message for - [KeyManagementService.Encrypt][google.cloud.kms.v1.KeyManagementService.Encrypt]. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" http_options = ( - _BaseKeyManagementServiceRestTransport._BaseEncrypt._get_http_options() + _BaseKeyManagementServiceRestTransport._BaseDeleteCryptoKeyVersion._get_http_options() ) - request, metadata = self._interceptor.pre_encrypt(request, metadata) - transcoded_request = _BaseKeyManagementServiceRestTransport._BaseEncrypt._get_transcoded_request( - http_options, request + request, metadata = self._interceptor.pre_delete_crypto_key_version( + request, metadata ) - - body = _BaseKeyManagementServiceRestTransport._BaseEncrypt._get_request_body_json( - transcoded_request + transcoded_request = _BaseKeyManagementServiceRestTransport._BaseDeleteCryptoKeyVersion._get_transcoded_request( + http_options, request ) # Jsonify the query params - query_params = _BaseKeyManagementServiceRestTransport._BaseEncrypt._get_query_params_json( + query_params = _BaseKeyManagementServiceRestTransport._BaseDeleteCryptoKeyVersion._get_query_params_json( transcoded_request ) @@ -3486,31 +3722,354 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.kms_v1.KeyManagementServiceClient.Encrypt", + f"Sending request for google.cloud.kms_v1.KeyManagementServiceClient.DeleteCryptoKeyVersion", extra={ "serviceName": "google.cloud.kms.v1.KeyManagementService", - "rpcName": "Encrypt", + "rpcName": "DeleteCryptoKeyVersion", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = KeyManagementServiceRestTransport._Encrypt._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - + response = ( + KeyManagementServiceRestTransport._DeleteCryptoKeyVersion._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_crypto_key_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_crypto_key_version_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.kms_v1.KeyManagementServiceClient.delete_crypto_key_version", + extra={ + "serviceName": "google.cloud.kms.v1.KeyManagementService", + "rpcName": "DeleteCryptoKeyVersion", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DestroyCryptoKeyVersion( + _BaseKeyManagementServiceRestTransport._BaseDestroyCryptoKeyVersion, + KeyManagementServiceRestStub, + ): + def __hash__(self): + return hash("KeyManagementServiceRestTransport.DestroyCryptoKeyVersion") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = 
"application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: service.DestroyCryptoKeyVersionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> resources.CryptoKeyVersion: + r"""Call the destroy crypto key + version method over HTTP. + + Args: + request (~.service.DestroyCryptoKeyVersionRequest): + The request object. Request message for + [KeyManagementService.DestroyCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.DestroyCryptoKeyVersion]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.resources.CryptoKeyVersion: + A + [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] + represents an individual cryptographic key, and the + associated key material. + + An + [ENABLED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.ENABLED] + version can be used for cryptographic operations. + + For security reasons, the raw cryptographic key material + represented by a + [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] + can never be viewed or exported. It can only be used to + encrypt, decrypt, or sign data when an authorized user + or application invokes Cloud KMS. 
+ + """ + + http_options = ( + _BaseKeyManagementServiceRestTransport._BaseDestroyCryptoKeyVersion._get_http_options() + ) + + request, metadata = self._interceptor.pre_destroy_crypto_key_version( + request, metadata + ) + transcoded_request = _BaseKeyManagementServiceRestTransport._BaseDestroyCryptoKeyVersion._get_transcoded_request( + http_options, request + ) + + body = _BaseKeyManagementServiceRestTransport._BaseDestroyCryptoKeyVersion._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseKeyManagementServiceRestTransport._BaseDestroyCryptoKeyVersion._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.kms_v1.KeyManagementServiceClient.DestroyCryptoKeyVersion", + extra={ + "serviceName": "google.cloud.kms.v1.KeyManagementService", + "rpcName": "DestroyCryptoKeyVersion", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = KeyManagementServiceRestTransport._DestroyCryptoKeyVersion._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.CryptoKeyVersion() + pb_resp = resources.CryptoKeyVersion.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_destroy_crypto_key_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_destroy_crypto_key_version_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = resources.CryptoKeyVersion.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.kms_v1.KeyManagementServiceClient.destroy_crypto_key_version", + extra={ + "serviceName": "google.cloud.kms.v1.KeyManagementService", + "rpcName": "DestroyCryptoKeyVersion", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _Encrypt( + _BaseKeyManagementServiceRestTransport._BaseEncrypt, + KeyManagementServiceRestStub, + ): + def __hash__(self): + return hash("KeyManagementServiceRestTransport.Encrypt") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: service.EncryptRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, 
+ timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> service.EncryptResponse: + r"""Call the encrypt method over HTTP. + + Args: + request (~.service.EncryptRequest): + The request object. Request message for + [KeyManagementService.Encrypt][google.cloud.kms.v1.KeyManagementService.Encrypt]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.service.EncryptResponse: + Response message for + [KeyManagementService.Encrypt][google.cloud.kms.v1.KeyManagementService.Encrypt]. + + """ + + http_options = ( + _BaseKeyManagementServiceRestTransport._BaseEncrypt._get_http_options() + ) + + request, metadata = self._interceptor.pre_encrypt(request, metadata) + transcoded_request = _BaseKeyManagementServiceRestTransport._BaseEncrypt._get_transcoded_request( + http_options, request + ) + + body = _BaseKeyManagementServiceRestTransport._BaseEncrypt._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseKeyManagementServiceRestTransport._BaseEncrypt._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for 
google.cloud.kms_v1.KeyManagementServiceClient.Encrypt", + extra={ + "serviceName": "google.cloud.kms.v1.KeyManagementService", + "rpcName": "Encrypt", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = KeyManagementServiceRestTransport._Encrypt._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + # Return the response resp = service.EncryptResponse() pb_resp = service.EncryptResponse.pb(resp) @@ -4197,7 +4756,156 @@ def __call__( logging.DEBUG ): # pragma: NO COVER try: - response_payload = resources.ImportJob.to_json(response) + response_payload = resources.ImportJob.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.kms_v1.KeyManagementServiceClient.get_import_job", + extra={ + "serviceName": "google.cloud.kms.v1.KeyManagementService", + "rpcName": "GetImportJob", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetKeyRing( + _BaseKeyManagementServiceRestTransport._BaseGetKeyRing, + KeyManagementServiceRestStub, + ): + def __hash__(self): + return hash("KeyManagementServiceRestTransport.GetKeyRing") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: service.GetKeyRingRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> resources.KeyRing: + r"""Call the get key ring method over HTTP. + + Args: + request (~.service.GetKeyRingRequest): + The request object. Request message for + [KeyManagementService.GetKeyRing][google.cloud.kms.v1.KeyManagementService.GetKeyRing]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.resources.KeyRing: + A [KeyRing][google.cloud.kms.v1.KeyRing] is a toplevel + logical grouping of + [CryptoKeys][google.cloud.kms.v1.CryptoKey]. 
+ + """ + + http_options = ( + _BaseKeyManagementServiceRestTransport._BaseGetKeyRing._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_key_ring(request, metadata) + transcoded_request = _BaseKeyManagementServiceRestTransport._BaseGetKeyRing._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseKeyManagementServiceRestTransport._BaseGetKeyRing._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.kms_v1.KeyManagementServiceClient.GetKeyRing", + extra={ + "serviceName": "google.cloud.kms.v1.KeyManagementService", + "rpcName": "GetKeyRing", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = KeyManagementServiceRestTransport._GetKeyRing._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.KeyRing() + pb_resp = resources.KeyRing.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_key_ring(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_key_ring_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = resources.KeyRing.to_json(response) except: response_payload = None http_response = { @@ -4206,22 +4914,22 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.kms_v1.KeyManagementServiceClient.get_import_job", + "Received response for google.cloud.kms_v1.KeyManagementServiceClient.get_key_ring", extra={ "serviceName": "google.cloud.kms.v1.KeyManagementService", - "rpcName": "GetImportJob", + "rpcName": "GetKeyRing", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _GetKeyRing( - _BaseKeyManagementServiceRestTransport._BaseGetKeyRing, + class _GetPublicKey( + _BaseKeyManagementServiceRestTransport._BaseGetPublicKey, KeyManagementServiceRestStub, ): def __hash__(self): - return hash("KeyManagementServiceRestTransport.GetKeyRing") + return hash("KeyManagementServiceRestTransport.GetPublicKey") @staticmethod def _get_response( @@ -4247,18 +4955,18 @@ def _get_response( def __call__( self, - request: service.GetKeyRingRequest, + request: service.GetPublicKeyRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> resources.KeyRing: - r"""Call the get key ring method over HTTP. + ) -> resources.PublicKey: + r"""Call the get public key method over HTTP. 
Args: - request (~.service.GetKeyRingRequest): + request (~.service.GetPublicKeyRequest): The request object. Request message for - [KeyManagementService.GetKeyRing][google.cloud.kms.v1.KeyManagementService.GetKeyRing]. + [KeyManagementService.GetPublicKey][google.cloud.kms.v1.KeyManagementService.GetPublicKey]. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -4268,24 +4976,25 @@ def __call__( be of type `bytes`. Returns: - ~.resources.KeyRing: - A [KeyRing][google.cloud.kms.v1.KeyRing] is a toplevel - logical grouping of - [CryptoKeys][google.cloud.kms.v1.CryptoKey]. + ~.resources.PublicKey: + The public keys for a given + [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion]. + Obtained via + [GetPublicKey][google.cloud.kms.v1.KeyManagementService.GetPublicKey]. """ http_options = ( - _BaseKeyManagementServiceRestTransport._BaseGetKeyRing._get_http_options() + _BaseKeyManagementServiceRestTransport._BaseGetPublicKey._get_http_options() ) - request, metadata = self._interceptor.pre_get_key_ring(request, metadata) - transcoded_request = _BaseKeyManagementServiceRestTransport._BaseGetKeyRing._get_transcoded_request( + request, metadata = self._interceptor.pre_get_public_key(request, metadata) + transcoded_request = _BaseKeyManagementServiceRestTransport._BaseGetPublicKey._get_transcoded_request( http_options, request ) # Jsonify the query params - query_params = _BaseKeyManagementServiceRestTransport._BaseGetKeyRing._get_query_params_json( + query_params = _BaseKeyManagementServiceRestTransport._BaseGetPublicKey._get_query_params_json( transcoded_request ) @@ -4307,17 +5016,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.kms_v1.KeyManagementServiceClient.GetKeyRing", + f"Sending request for google.cloud.kms_v1.KeyManagementServiceClient.GetPublicKey", extra={ "serviceName": "google.cloud.kms.v1.KeyManagementService", - 
"rpcName": "GetKeyRing", + "rpcName": "GetPublicKey", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = KeyManagementServiceRestTransport._GetKeyRing._get_response( + response = KeyManagementServiceRestTransport._GetPublicKey._get_response( self._host, metadata, query_params, @@ -4332,21 +5041,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = resources.KeyRing() - pb_resp = resources.KeyRing.pb(resp) + resp = resources.PublicKey() + pb_resp = resources.PublicKey.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_key_ring(resp) + resp = self._interceptor.post_get_public_key(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_key_ring_with_metadata( + resp, _ = self._interceptor.post_get_public_key_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = resources.KeyRing.to_json(response) + response_payload = resources.PublicKey.to_json(response) except: response_payload = None http_response = { @@ -4355,22 +5064,22 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.kms_v1.KeyManagementServiceClient.get_key_ring", + "Received response for google.cloud.kms_v1.KeyManagementServiceClient.get_public_key", extra={ "serviceName": "google.cloud.kms.v1.KeyManagementService", - "rpcName": "GetKeyRing", + "rpcName": "GetPublicKey", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _GetPublicKey( - _BaseKeyManagementServiceRestTransport._BaseGetPublicKey, + class _GetRetiredResource( + _BaseKeyManagementServiceRestTransport._BaseGetRetiredResource, KeyManagementServiceRestStub, ): def __hash__(self): - return 
hash("KeyManagementServiceRestTransport.GetPublicKey") + return hash("KeyManagementServiceRestTransport.GetRetiredResource") @staticmethod def _get_response( @@ -4396,18 +5105,18 @@ def _get_response( def __call__( self, - request: service.GetPublicKeyRequest, + request: service.GetRetiredResourceRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> resources.PublicKey: - r"""Call the get public key method over HTTP. + ) -> resources.RetiredResource: + r"""Call the get retired resource method over HTTP. Args: - request (~.service.GetPublicKeyRequest): + request (~.service.GetRetiredResourceRequest): The request object. Request message for - [KeyManagementService.GetPublicKey][google.cloud.kms.v1.KeyManagementService.GetPublicKey]. + [KeyManagementService.GetRetiredResource][google.cloud.kms.v1.KeyManagementService.GetRetiredResource]. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -4417,25 +5126,28 @@ def __call__( be of type `bytes`. Returns: - ~.resources.PublicKey: - The public keys for a given - [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion]. - Obtained via - [GetPublicKey][google.cloud.kms.v1.KeyManagementService.GetPublicKey]. + ~.resources.RetiredResource: + A RetiredResource resource represents the record of a + deleted [CryptoKey][google.cloud.kms.v1.CryptoKey]. Its + purpose is to provide visibility into retained user data + and to prevent reuse of these names for new + [CryptoKeys][google.cloud.kms.v1.CryptoKey]. 
""" http_options = ( - _BaseKeyManagementServiceRestTransport._BaseGetPublicKey._get_http_options() + _BaseKeyManagementServiceRestTransport._BaseGetRetiredResource._get_http_options() ) - request, metadata = self._interceptor.pre_get_public_key(request, metadata) - transcoded_request = _BaseKeyManagementServiceRestTransport._BaseGetPublicKey._get_transcoded_request( + request, metadata = self._interceptor.pre_get_retired_resource( + request, metadata + ) + transcoded_request = _BaseKeyManagementServiceRestTransport._BaseGetRetiredResource._get_transcoded_request( http_options, request ) # Jsonify the query params - query_params = _BaseKeyManagementServiceRestTransport._BaseGetPublicKey._get_query_params_json( + query_params = _BaseKeyManagementServiceRestTransport._BaseGetRetiredResource._get_query_params_json( transcoded_request ) @@ -4457,23 +5169,25 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.kms_v1.KeyManagementServiceClient.GetPublicKey", + f"Sending request for google.cloud.kms_v1.KeyManagementServiceClient.GetRetiredResource", extra={ "serviceName": "google.cloud.kms.v1.KeyManagementService", - "rpcName": "GetPublicKey", + "rpcName": "GetRetiredResource", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = KeyManagementServiceRestTransport._GetPublicKey._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, + response = ( + KeyManagementServiceRestTransport._GetRetiredResource._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -4482,21 +5196,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = resources.PublicKey() - pb_resp = resources.PublicKey.pb(resp) + resp = resources.RetiredResource() + pb_resp = 
resources.RetiredResource.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_public_key(resp) + resp = self._interceptor.post_get_retired_resource(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_public_key_with_metadata( + resp, _ = self._interceptor.post_get_retired_resource_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = resources.PublicKey.to_json(response) + response_payload = resources.RetiredResource.to_json(response) except: response_payload = None http_response = { @@ -4505,10 +5219,10 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.kms_v1.KeyManagementServiceClient.get_public_key", + "Received response for google.cloud.kms_v1.KeyManagementServiceClient.get_retired_resource", extra={ "serviceName": "google.cloud.kms.v1.KeyManagementService", - "rpcName": "GetPublicKey", + "rpcName": "GetRetiredResource", "metadata": http_response["headers"], "httpResponse": http_response, }, @@ -5288,6 +6002,160 @@ def __call__( ) return resp + class _ListRetiredResources( + _BaseKeyManagementServiceRestTransport._BaseListRetiredResources, + KeyManagementServiceRestStub, + ): + def __hash__(self): + return hash("KeyManagementServiceRestTransport.ListRetiredResources") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + 
self, + request: service.ListRetiredResourcesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> service.ListRetiredResourcesResponse: + r"""Call the list retired resources method over HTTP. + + Args: + request (~.service.ListRetiredResourcesRequest): + The request object. Request message for + [KeyManagementService.ListRetiredResources][google.cloud.kms.v1.KeyManagementService.ListRetiredResources]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.service.ListRetiredResourcesResponse: + Response message for + [KeyManagementService.ListRetiredResources][google.cloud.kms.v1.KeyManagementService.ListRetiredResources]. 
+ + """ + + http_options = ( + _BaseKeyManagementServiceRestTransport._BaseListRetiredResources._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_retired_resources( + request, metadata + ) + transcoded_request = _BaseKeyManagementServiceRestTransport._BaseListRetiredResources._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseKeyManagementServiceRestTransport._BaseListRetiredResources._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.kms_v1.KeyManagementServiceClient.ListRetiredResources", + extra={ + "serviceName": "google.cloud.kms.v1.KeyManagementService", + "rpcName": "ListRetiredResources", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + KeyManagementServiceRestTransport._ListRetiredResources._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListRetiredResourcesResponse() + pb_resp = service.ListRetiredResourcesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_retired_resources(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_retired_resources_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = service.ListRetiredResourcesResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.kms_v1.KeyManagementServiceClient.list_retired_resources", + extra={ + "serviceName": "google.cloud.kms.v1.KeyManagementService", + "rpcName": "ListRetiredResources", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _MacSign( _BaseKeyManagementServiceRestTransport._BaseMacSign, KeyManagementServiceRestStub, @@ -6641,6 +7509,22 @@ def decrypt(self) -> Callable[[service.DecryptRequest], service.DecryptResponse] # In C++ this would require a dynamic_cast return self._Decrypt(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_crypto_key( + self, + ) -> Callable[[service.DeleteCryptoKeyRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteCryptoKey(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_crypto_key_version( + self, + ) -> Callable[[service.DeleteCryptoKeyVersionRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteCryptoKeyVersion(self._session, self._host, self._interceptor) # type: ignore + @property def destroy_crypto_key_version( self, @@ -6703,6 +7587,14 @@ def get_public_key( # In C++ this would require a dynamic_cast return self._GetPublicKey(self._session, self._host, self._interceptor) # type: ignore + @property + def get_retired_resource( + self, + ) -> Callable[[service.GetRetiredResourceRequest], resources.RetiredResource]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetRetiredResource(self._session, self._host, self._interceptor) # type: ignore + @property def import_crypto_key_version( self, @@ -6745,6 +7637,16 @@ def list_key_rings( # In C++ this would require a dynamic_cast return self._ListKeyRings(self._session, self._host, self._interceptor) # type: ignore + @property + def list_retired_resources( + self, + ) -> Callable[ + [service.ListRetiredResourcesRequest], service.ListRetiredResourcesResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListRetiredResources(self._session, self._host, self._interceptor) # type: ignore + @property def mac_sign(self) -> Callable[[service.MacSignRequest], service.MacSignResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/transports/rest_base.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/transports/rest_base.py index 9b8ba9ad899b..037acf375009 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/transports/rest_base.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/transports/rest_base.py @@ -553,6 +553,100 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseDeleteCryptoKey: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/keyRings/*/cryptoKeys/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.DeleteCryptoKeyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseKeyManagementServiceRestTransport._BaseDeleteCryptoKey._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteCryptoKeyVersion: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + 
__REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/keyRings/*/cryptoKeys/*/cryptoKeyVersions/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.DeleteCryptoKeyVersionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseKeyManagementServiceRestTransport._BaseDeleteCryptoKeyVersion._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseDestroyCryptoKeyVersion: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -944,6 +1038,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseGetRetiredResource: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/retiredResources/*}", + }, + ] + return http_options + + @staticmethod + def 
_get_transcoded_request(http_options, request): + pb_request = service.GetRetiredResourceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseKeyManagementServiceRestTransport._BaseGetRetiredResource._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseImportCryptoKeyVersion: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -1189,6 +1330,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseListRetiredResources: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/retiredResources", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.ListRetiredResourcesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + 
_BaseKeyManagementServiceRestTransport._BaseListRetiredResources._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseMacSign: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/types/__init__.py b/packages/google-cloud-kms/google/cloud/kms_v1/types/__init__.py index 7534e0f78c54..ea1899da84fd 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/types/__init__.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/types/__init__.py @@ -77,6 +77,7 @@ KeyRing, ProtectionLevel, PublicKey, + RetiredResource, ) from .service import ( AsymmetricDecryptRequest, @@ -91,6 +92,10 @@ DecapsulateResponse, DecryptRequest, DecryptResponse, + DeleteCryptoKeyMetadata, + DeleteCryptoKeyRequest, + DeleteCryptoKeyVersionMetadata, + DeleteCryptoKeyVersionRequest, DestroyCryptoKeyVersionRequest, Digest, EncryptRequest, @@ -102,6 +107,7 @@ GetImportJobRequest, GetKeyRingRequest, GetPublicKeyRequest, + GetRetiredResourceRequest, ImportCryptoKeyVersionRequest, ListCryptoKeysRequest, ListCryptoKeysResponse, @@ -111,6 +117,8 @@ ListImportJobsResponse, ListKeyRingsRequest, ListKeyRingsResponse, + ListRetiredResourcesRequest, + ListRetiredResourcesResponse, LocationMetadata, MacSignRequest, MacSignResponse, @@ -180,6 +188,7 @@ "KeyOperationAttestation", "KeyRing", "PublicKey", + "RetiredResource", "AccessReason", "ProtectionLevel", "AsymmetricDecryptRequest", @@ -194,6 +203,10 @@ "DecapsulateResponse", "DecryptRequest", "DecryptResponse", + "DeleteCryptoKeyMetadata", + "DeleteCryptoKeyRequest", + "DeleteCryptoKeyVersionMetadata", + "DeleteCryptoKeyVersionRequest", "DestroyCryptoKeyVersionRequest", "Digest", "EncryptRequest", @@ -205,6 +218,7 @@ "GetImportJobRequest", "GetKeyRingRequest", "GetPublicKeyRequest", + "GetRetiredResourceRequest", "ImportCryptoKeyVersionRequest", "ListCryptoKeysRequest", 
"ListCryptoKeysResponse", @@ -214,6 +228,8 @@ "ListImportJobsResponse", "ListKeyRingsRequest", "ListKeyRingsResponse", + "ListRetiredResourcesRequest", + "ListRetiredResourcesResponse", "LocationMetadata", "MacSignRequest", "MacSignResponse", diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/types/autokey_admin.py b/packages/google-cloud-kms/google/cloud/kms_v1/types/autokey_admin.py index fc8d1a62840e..d288c6f2357a 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/types/autokey_admin.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/types/autokey_admin.py @@ -66,7 +66,8 @@ class GetAutokeyConfigRequest(proto.Message): name (str): Required. Name of the [AutokeyConfig][google.cloud.kms.v1.AutokeyConfig] resource, - e.g. ``folders/{FOLDER_NUMBER}/autokeyConfig``. + e.g. ``folders/{FOLDER_NUMBER}/autokeyConfig`` or + ``projects/{PROJECT_NUMBER}/autokeyConfig``. """ name: str = proto.Field( @@ -82,7 +83,8 @@ class AutokeyConfig(proto.Message): name (str): Identifier. Name of the [AutokeyConfig][google.cloud.kms.v1.AutokeyConfig] resource, - e.g. ``folders/{FOLDER_NUMBER}/autokeyConfig``. + e.g. ``folders/{FOLDER_NUMBER}/autokeyConfig`` or + ``projects/{PROJECT_NUMBER}/autokeyConfig``. key_project (str): Optional. Name of the key project, e.g. ``projects/{PROJECT_ID}`` or ``projects/{PROJECT_NUMBER}``, @@ -106,6 +108,10 @@ class AutokeyConfig(proto.Message): client has an up-to-date value before proceeding. The request will be rejected with an ABORTED error on a mismatched etag. + key_project_resolution_mode (google.cloud.kms_v1.types.AutokeyConfig.KeyProjectResolutionMode): + Optional. KeyProjectResolutionMode for the AutokeyConfig. + Valid values are ``DEDICATED_KEY_PROJECT``, + ``RESOURCE_PROJECT``, or ``DISABLED``. """ class State(proto.Enum): @@ -125,11 +131,51 @@ class State(proto.Enum): The AutokeyConfig is not yet initialized or has been reset to its default uninitialized state. 
+ KEY_PROJECT_PERMISSION_DENIED (4): + The service account lacks the necessary + permissions in the key project to configure + Autokey. """ STATE_UNSPECIFIED = 0 ACTIVE = 1 KEY_PROJECT_DELETED = 2 UNINITIALIZED = 3 + KEY_PROJECT_PERMISSION_DENIED = 4 + + class KeyProjectResolutionMode(proto.Enum): + r"""Defines the resolution mode enum for the key project. The + [KeyProjectResolutionMode][google.cloud.kms.v1.AutokeyConfig.KeyProjectResolutionMode] + determines the mechanism by which + [AutokeyConfig][google.cloud.kms.v1.AutokeyConfig] identifies a + [key_project][google.cloud.kms.v1.AutokeyConfig.key_project] at its + specific configuration node. This parameter also determines if + Autokey can be used within this project or folder. + + Values: + KEY_PROJECT_RESOLUTION_MODE_UNSPECIFIED (0): + Default value. KeyProjectResolutionMode when not specified + will act as ``DEDICATED_KEY_PROJECT``. + DEDICATED_KEY_PROJECT (1): + Keys are created in a dedicated project specified by + ``key_project``. + RESOURCE_PROJECT (2): + Keys are created in the same project as the resource + requesting the key. The ``key_project`` must not be set when + this mode is used. + DISABLED (3): + Disables the AutokeyConfig. When this mode is set, any + AutokeyConfig from higher levels in the resource hierarchy + are ignored for this resource and its descendants. This + setting can be overridden by a more specific configuration + at a lower level. For example, if Autokey is disabled on a + folder, it can be re-enabled on a sub-folder or project + within that folder by setting a different mode (e.g., + DEDICATED_KEY_PROJECT or RESOURCE_PROJECT). 
+ """ + KEY_PROJECT_RESOLUTION_MODE_UNSPECIFIED = 0 + DEDICATED_KEY_PROJECT = 1 + RESOURCE_PROJECT = 2 + DISABLED = 3 name: str = proto.Field( proto.STRING, @@ -148,6 +194,11 @@ class State(proto.Enum): proto.STRING, number=6, ) + key_project_resolution_mode: KeyProjectResolutionMode = proto.Field( + proto.ENUM, + number=8, + enum=KeyProjectResolutionMode, + ) class ShowEffectiveAutokeyConfigRequest(proto.Message): diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/types/resources.py b/packages/google-cloud-kms/google/cloud/kms_v1/types/resources.py index b56206161983..7b2c7b4d50f3 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/types/resources.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/types/resources.py @@ -37,6 +37,7 @@ "ImportJob", "ExternalProtectionLevelOptions", "KeyAccessJustificationsPolicy", + "RetiredResource", }, ) @@ -823,14 +824,42 @@ class CryptoKeyVersionAlgorithm(proto.Enum): X-Wing hybrid KEM combining ML-KEM-768 with X25519 following datatracker.ietf.org/doc/draft-connolly-cfrg-xwing-kem/. + PQ_SIGN_ML_DSA_44 (68): + The post-quantum Module-Lattice-Based Digital + Signature Algorithm, at security level 1. + Randomized version. PQ_SIGN_ML_DSA_65 (56): The post-quantum Module-Lattice-Based Digital Signature Algorithm, at security level 3. Randomized version. + PQ_SIGN_ML_DSA_87 (69): + The post-quantum Module-Lattice-Based Digital + Signature Algorithm, at security level 5. + Randomized version. PQ_SIGN_SLH_DSA_SHA2_128S (57): The post-quantum stateless hash-based digital signature algorithm, at security level 1. Randomized version. + PQ_SIGN_HASH_SLH_DSA_SHA2_128S_SHA256 (60): + The post-quantum stateless hash-based digital + signature algorithm, at security level 1. + Randomized pre-hash version supporting SHA256 + digests. + PQ_SIGN_ML_DSA_44_EXTERNAL_MU (70): + The post-quantum Module-Lattice-Based Digital + Signature Algorithm, at security level 1. 
+ Randomized version supporting + externally-computed message representatives. + PQ_SIGN_ML_DSA_65_EXTERNAL_MU (67): + The post-quantum Module-Lattice-Based Digital + Signature Algorithm, at security level 3. + Randomized version supporting + externally-computed message representatives. + PQ_SIGN_ML_DSA_87_EXTERNAL_MU (71): + The post-quantum Module-Lattice-Based Digital + Signature Algorithm, at security level 5. + Randomized version supporting + externally-computed message representatives. """ CRYPTO_KEY_VERSION_ALGORITHM_UNSPECIFIED = 0 GOOGLE_SYMMETRIC_ENCRYPTION = 1 @@ -871,8 +900,14 @@ class CryptoKeyVersionAlgorithm(proto.Enum): ML_KEM_768 = 47 ML_KEM_1024 = 48 KEM_XWING = 63 + PQ_SIGN_ML_DSA_44 = 68 PQ_SIGN_ML_DSA_65 = 56 + PQ_SIGN_ML_DSA_87 = 69 PQ_SIGN_SLH_DSA_SHA2_128S = 57 + PQ_SIGN_HASH_SLH_DSA_SHA2_128S_SHA256 = 60 + PQ_SIGN_ML_DSA_44_EXTERNAL_MU = 70 + PQ_SIGN_ML_DSA_65_EXTERNAL_MU = 67 + PQ_SIGN_ML_DSA_87_EXTERNAL_MU = 71 class CryptoKeyVersionState(proto.Enum): r"""The state of a @@ -1547,4 +1582,48 @@ class KeyAccessJustificationsPolicy(proto.Message): ) +class RetiredResource(proto.Message): + r"""A RetiredResource resource represents the record of a deleted + [CryptoKey][google.cloud.kms.v1.CryptoKey]. Its purpose is to + provide visibility into retained user data and to prevent reuse of + these names for new [CryptoKeys][google.cloud.kms.v1.CryptoKey]. + + Attributes: + name (str): + Output only. Identifier. The resource name for this + [RetiredResource][google.cloud.kms.v1.RetiredResource] in + the format ``projects/*/locations/*/retiredResources/*``. + original_resource (str): + Output only. The full resource name of the original + [CryptoKey][google.cloud.kms.v1.CryptoKey] that was deleted + in the format + ``projects/*/locations/*/keyRings/*/cryptoKeys/*``. + resource_type (str): + Output only. The resource type of the + original deleted resource. + delete_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. 
The time at which the original + resource was deleted and this RetiredResource + record was created. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + original_resource: str = proto.Field( + proto.STRING, + number=2, + ) + resource_type: str = proto.Field( + proto.STRING, + number=3, + ) + delete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/types/service.py b/packages/google-cloud-kms/google/cloud/kms_v1/types/service.py index b42b2cbde05a..4da0c5880827 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/types/service.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/types/service.py @@ -30,18 +30,23 @@ "ListCryptoKeysRequest", "ListCryptoKeyVersionsRequest", "ListImportJobsRequest", + "ListRetiredResourcesRequest", "ListKeyRingsResponse", "ListCryptoKeysResponse", "ListCryptoKeyVersionsResponse", "ListImportJobsResponse", + "ListRetiredResourcesResponse", "GetKeyRingRequest", "GetCryptoKeyRequest", "GetCryptoKeyVersionRequest", "GetPublicKeyRequest", "GetImportJobRequest", + "GetRetiredResourceRequest", "CreateKeyRingRequest", "CreateCryptoKeyRequest", "CreateCryptoKeyVersionRequest", + "DeleteCryptoKeyRequest", + "DeleteCryptoKeyVersionRequest", "ImportCryptoKeyVersionRequest", "CreateImportJobRequest", "UpdateCryptoKeyRequest", @@ -71,6 +76,8 @@ "GenerateRandomBytesResponse", "Digest", "LocationMetadata", + "DeleteCryptoKeyMetadata", + "DeleteCryptoKeyVersionMetadata", }, ) @@ -309,6 +316,43 @@ class ListImportJobsRequest(proto.Message): ) +class ListRetiredResourcesRequest(proto.Message): + r"""Request message for + [KeyManagementService.ListRetiredResources][google.cloud.kms.v1.KeyManagementService.ListRetiredResources]. + + Attributes: + parent (str): + Required. 
The project-specific location holding the + [RetiredResources][google.cloud.kms.v1.RetiredResource], in + the format ``projects/*/locations/*``. + page_size (int): + Optional. Optional limit on the number of + [RetiredResources][google.cloud.kms.v1.RetiredResource] to + be included in the response. Further + [RetiredResources][google.cloud.kms.v1.RetiredResource] can + subsequently be obtained by including the + [ListRetiredResourcesResponse.next_page_token][google.cloud.kms.v1.ListRetiredResourcesResponse.next_page_token] + in a subsequent request. If unspecified, the server will + pick an appropriate default. + page_token (str): + Optional. Optional pagination token, returned earlier via + [ListRetiredResourcesResponse.next_page_token][google.cloud.kms.v1.ListRetiredResourcesResponse.next_page_token]. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + class ListKeyRingsResponse(proto.Message): r"""Response message for [KeyManagementService.ListKeyRings][google.cloud.kms.v1.KeyManagementService.ListKeyRings]. @@ -471,6 +515,44 @@ def raw_page(self): ) +class ListRetiredResourcesResponse(proto.Message): + r"""Response message for + [KeyManagementService.ListRetiredResources][google.cloud.kms.v1.KeyManagementService.ListRetiredResources]. + + Attributes: + retired_resources (MutableSequence[google.cloud.kms_v1.types.RetiredResource]): + The list of + [RetiredResources][google.cloud.kms.v1.RetiredResource]. + next_page_token (str): + A token to retrieve the next page of results. Pass this + value in + [ListRetiredResourcesRequest.page_token][google.cloud.kms.v1.ListRetiredResourcesRequest.page_token] + to retrieve the next page of results. + total_size (int): + The total number of + [RetiredResources][google.cloud.kms.v1.RetiredResource] that + matched the query. 
+ """ + + @property + def raw_page(self): + return self + + retired_resources: MutableSequence[resources.RetiredResource] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.RetiredResource, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + total_size: int = proto.Field( + proto.INT64, + number=3, + ) + + class GetKeyRingRequest(proto.Message): r"""Request message for [KeyManagementService.GetKeyRing][google.cloud.kms.v1.KeyManagementService.GetKeyRing]. @@ -570,6 +652,24 @@ class GetImportJobRequest(proto.Message): ) +class GetRetiredResourceRequest(proto.Message): + r"""Request message for + [KeyManagementService.GetRetiredResource][google.cloud.kms.v1.KeyManagementService.GetRetiredResource]. + + Attributes: + name (str): + Required. The + [name][google.cloud.kms.v1.RetiredResource.name] of the + [RetiredResource][google.cloud.kms.v1.RetiredResource] to + get. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + class CreateKeyRingRequest(proto.Message): r"""Request message for [KeyManagementService.CreateKeyRing][google.cloud.kms.v1.KeyManagementService.CreateKeyRing]. @@ -675,6 +775,40 @@ class CreateCryptoKeyVersionRequest(proto.Message): ) +class DeleteCryptoKeyRequest(proto.Message): + r"""Request message for + [KeyManagementService.DeleteCryptoKey][google.cloud.kms.v1.KeyManagementService.DeleteCryptoKey]. + + Attributes: + name (str): + Required. The [name][google.cloud.kms.v1.CryptoKey.name] of + the [CryptoKey][google.cloud.kms.v1.CryptoKey] to delete. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteCryptoKeyVersionRequest(proto.Message): + r"""Request message for + [KeyManagementService.DeleteCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.DeleteCryptoKeyVersion]. + + Attributes: + name (str): + Required. The + [name][google.cloud.kms.v1.CryptoKeyVersion.name] of the + [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] to + delete. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + class ImportCryptoKeyVersionRequest(proto.Message): r"""Request message for [KeyManagementService.ImportCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.ImportCryptoKeyVersion]. @@ -2709,4 +2843,31 @@ class LocationMetadata(proto.Message): ) +class DeleteCryptoKeyMetadata(proto.Message): + r"""Represents the metadata of the + [KeyManagementService.DeleteCryptoKey][google.cloud.kms.v1.KeyManagementService.DeleteCryptoKey] + long-running operation. + + Attributes: + retired_resource (str): + Output only. The resource name of the + [RetiredResource][google.cloud.kms.v1.RetiredResource] + created as a result of this operation, in the format + ``projects/*/locations/*/retiredResources/*``. + """ + + retired_resource: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteCryptoKeyVersionMetadata(proto.Message): + r"""Represents the metadata of the + [KeyManagementService.DeleteCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.DeleteCryptoKeyVersion] + long-running operation. + + """ + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_delete_crypto_key_async.py b/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_delete_crypto_key_async.py new file mode 100644 index 000000000000..511921e22d19 --- /dev/null +++ b/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_delete_crypto_key_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteCryptoKey +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-kms + + +# [START cloudkms_v1_generated_KeyManagementService_DeleteCryptoKey_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import kms_v1 + + +async def sample_delete_crypto_key(): + # Create a client + client = kms_v1.KeyManagementServiceAsyncClient() + + # Initialize request argument(s) + request = kms_v1.DeleteCryptoKeyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_crypto_key(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END cloudkms_v1_generated_KeyManagementService_DeleteCryptoKey_async] diff --git a/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_delete_crypto_key_sync.py b/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_delete_crypto_key_sync.py new file mode 100644 index 000000000000..cb295410c1ca --- /dev/null +++ b/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_delete_crypto_key_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteCryptoKey +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-kms + + +# [START cloudkms_v1_generated_KeyManagementService_DeleteCryptoKey_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import kms_v1 + + +def sample_delete_crypto_key(): + # Create a client + client = kms_v1.KeyManagementServiceClient() + + # Initialize request argument(s) + request = kms_v1.DeleteCryptoKeyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_crypto_key(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END cloudkms_v1_generated_KeyManagementService_DeleteCryptoKey_sync] diff --git a/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_delete_crypto_key_version_async.py b/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_delete_crypto_key_version_async.py new file mode 100644 index 000000000000..fd3100190d2f --- /dev/null +++ b/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_delete_crypto_key_version_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteCryptoKeyVersion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-kms + + +# [START cloudkms_v1_generated_KeyManagementService_DeleteCryptoKeyVersion_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import kms_v1 + + +async def sample_delete_crypto_key_version(): + # Create a client + client = kms_v1.KeyManagementServiceAsyncClient() + + # Initialize request argument(s) + request = kms_v1.DeleteCryptoKeyVersionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_crypto_key_version(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END cloudkms_v1_generated_KeyManagementService_DeleteCryptoKeyVersion_async] diff --git a/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_delete_crypto_key_version_sync.py b/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_delete_crypto_key_version_sync.py new file mode 100644 index 000000000000..f9718d9cc3cf --- /dev/null +++ b/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_delete_crypto_key_version_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteCryptoKeyVersion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-kms + + +# [START cloudkms_v1_generated_KeyManagementService_DeleteCryptoKeyVersion_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import kms_v1 + + +def sample_delete_crypto_key_version(): + # Create a client + client = kms_v1.KeyManagementServiceClient() + + # Initialize request argument(s) + request = kms_v1.DeleteCryptoKeyVersionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_crypto_key_version(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END cloudkms_v1_generated_KeyManagementService_DeleteCryptoKeyVersion_sync] diff --git a/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_get_retired_resource_async.py b/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_get_retired_resource_async.py new file mode 100644 index 000000000000..a04b87ea96ee --- /dev/null +++ b/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_get_retired_resource_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetRetiredResource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-kms + + +# [START cloudkms_v1_generated_KeyManagementService_GetRetiredResource_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import kms_v1 + + +async def sample_get_retired_resource(): + # Create a client + client = kms_v1.KeyManagementServiceAsyncClient() + + # Initialize request argument(s) + request = kms_v1.GetRetiredResourceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_retired_resource(request=request) + + # Handle the response + print(response) + + +# [END cloudkms_v1_generated_KeyManagementService_GetRetiredResource_async] diff --git a/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_get_retired_resource_sync.py b/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_get_retired_resource_sync.py new file mode 100644 index 000000000000..91e878b79371 --- /dev/null +++ b/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_get_retired_resource_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetRetiredResource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-kms + + +# [START cloudkms_v1_generated_KeyManagementService_GetRetiredResource_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import kms_v1 + + +def sample_get_retired_resource(): + # Create a client + client = kms_v1.KeyManagementServiceClient() + + # Initialize request argument(s) + request = kms_v1.GetRetiredResourceRequest( + name="name_value", + ) + + # Make the request + response = client.get_retired_resource(request=request) + + # Handle the response + print(response) + + +# [END cloudkms_v1_generated_KeyManagementService_GetRetiredResource_sync] diff --git a/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_import_crypto_key_version_async.py b/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_import_crypto_key_version_async.py index 7c4e14fa6c16..1f16c340be17 100644 --- a/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_import_crypto_key_version_async.py +++ b/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_import_crypto_key_version_async.py @@ -42,7 +42,7 @@ async def sample_import_crypto_key_version(): request = kms_v1.ImportCryptoKeyVersionRequest( rsa_aes_wrapped_key=b"rsa_aes_wrapped_key_blob", parent="parent_value", - algorithm="PQ_SIGN_SLH_DSA_SHA2_128S", + algorithm="PQ_SIGN_ML_DSA_87_EXTERNAL_MU", import_job="import_job_value", ) diff --git 
a/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_import_crypto_key_version_sync.py b/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_import_crypto_key_version_sync.py index 42256175e4ba..c1f31f755f3f 100644 --- a/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_import_crypto_key_version_sync.py +++ b/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_import_crypto_key_version_sync.py @@ -42,7 +42,7 @@ def sample_import_crypto_key_version(): request = kms_v1.ImportCryptoKeyVersionRequest( rsa_aes_wrapped_key=b"rsa_aes_wrapped_key_blob", parent="parent_value", - algorithm="PQ_SIGN_SLH_DSA_SHA2_128S", + algorithm="PQ_SIGN_ML_DSA_87_EXTERNAL_MU", import_job="import_job_value", ) diff --git a/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_list_retired_resources_async.py b/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_list_retired_resources_async.py new file mode 100644 index 000000000000..a210730e9b75 --- /dev/null +++ b/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_list_retired_resources_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
DO NOT EDIT! +# +# Snippet for ListRetiredResources +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-kms + + +# [START cloudkms_v1_generated_KeyManagementService_ListRetiredResources_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import kms_v1 + + +async def sample_list_retired_resources(): + # Create a client + client = kms_v1.KeyManagementServiceAsyncClient() + + # Initialize request argument(s) + request = kms_v1.ListRetiredResourcesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_retired_resources(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END cloudkms_v1_generated_KeyManagementService_ListRetiredResources_async] diff --git a/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_list_retired_resources_sync.py b/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_list_retired_resources_sync.py new file mode 100644 index 000000000000..986acafb2705 --- /dev/null +++ b/packages/google-cloud-kms/samples/generated_samples/cloudkms_v1_generated_key_management_service_list_retired_resources_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListRetiredResources +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-kms + + +# [START cloudkms_v1_generated_KeyManagementService_ListRetiredResources_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import kms_v1 + + +def sample_list_retired_resources(): + # Create a client + client = kms_v1.KeyManagementServiceClient() + + # Initialize request argument(s) + request = kms_v1.ListRetiredResourcesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_retired_resources(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END cloudkms_v1_generated_KeyManagementService_ListRetiredResources_sync] diff --git a/packages/google-cloud-kms/samples/generated_samples/snippet_metadata_google.cloud.kms.v1.json b/packages/google-cloud-kms/samples/generated_samples/snippet_metadata_google.cloud.kms.v1.json index a4d1f1c33dbb..b9042d7ce3a5 100644 --- a/packages/google-cloud-kms/samples/generated_samples/snippet_metadata_google.cloud.kms.v1.json +++ b/packages/google-cloud-kms/samples/generated_samples/snippet_metadata_google.cloud.kms.v1.json @@ -5003,6 +5003,328 @@ ], "title": "cloudkms_v1_generated_key_management_service_decrypt_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.kms_v1.KeyManagementServiceAsyncClient", + "shortName": "KeyManagementServiceAsyncClient" + }, + "fullName": "google.cloud.kms_v1.KeyManagementServiceAsyncClient.delete_crypto_key_version", + "method": { + "fullName": "google.cloud.kms.v1.KeyManagementService.DeleteCryptoKeyVersion", + "service": { + "fullName": "google.cloud.kms.v1.KeyManagementService", + "shortName": "KeyManagementService" + }, + "shortName": "DeleteCryptoKeyVersion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.kms_v1.types.DeleteCryptoKeyVersionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { 
+ "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_crypto_key_version" + }, + "description": "Sample for DeleteCryptoKeyVersion", + "file": "cloudkms_v1_generated_key_management_service_delete_crypto_key_version_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudkms_v1_generated_KeyManagementService_DeleteCryptoKeyVersion_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudkms_v1_generated_key_management_service_delete_crypto_key_version_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.kms_v1.KeyManagementServiceClient", + "shortName": "KeyManagementServiceClient" + }, + "fullName": "google.cloud.kms_v1.KeyManagementServiceClient.delete_crypto_key_version", + "method": { + "fullName": "google.cloud.kms.v1.KeyManagementService.DeleteCryptoKeyVersion", + "service": { + "fullName": "google.cloud.kms.v1.KeyManagementService", + "shortName": "KeyManagementService" + }, + "shortName": "DeleteCryptoKeyVersion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.kms_v1.types.DeleteCryptoKeyVersionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": 
"delete_crypto_key_version" + }, + "description": "Sample for DeleteCryptoKeyVersion", + "file": "cloudkms_v1_generated_key_management_service_delete_crypto_key_version_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudkms_v1_generated_KeyManagementService_DeleteCryptoKeyVersion_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudkms_v1_generated_key_management_service_delete_crypto_key_version_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.kms_v1.KeyManagementServiceAsyncClient", + "shortName": "KeyManagementServiceAsyncClient" + }, + "fullName": "google.cloud.kms_v1.KeyManagementServiceAsyncClient.delete_crypto_key", + "method": { + "fullName": "google.cloud.kms.v1.KeyManagementService.DeleteCryptoKey", + "service": { + "fullName": "google.cloud.kms.v1.KeyManagementService", + "shortName": "KeyManagementService" + }, + "shortName": "DeleteCryptoKey" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.kms_v1.types.DeleteCryptoKeyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_crypto_key" + }, + "description": "Sample for DeleteCryptoKey", + "file": "cloudkms_v1_generated_key_management_service_delete_crypto_key_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", 
+ "regionTag": "cloudkms_v1_generated_KeyManagementService_DeleteCryptoKey_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudkms_v1_generated_key_management_service_delete_crypto_key_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.kms_v1.KeyManagementServiceClient", + "shortName": "KeyManagementServiceClient" + }, + "fullName": "google.cloud.kms_v1.KeyManagementServiceClient.delete_crypto_key", + "method": { + "fullName": "google.cloud.kms.v1.KeyManagementService.DeleteCryptoKey", + "service": { + "fullName": "google.cloud.kms.v1.KeyManagementService", + "shortName": "KeyManagementService" + }, + "shortName": "DeleteCryptoKey" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.kms_v1.types.DeleteCryptoKeyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_crypto_key" + }, + "description": "Sample for DeleteCryptoKey", + "file": "cloudkms_v1_generated_key_management_service_delete_crypto_key_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudkms_v1_generated_KeyManagementService_DeleteCryptoKey_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + 
"end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudkms_v1_generated_key_management_service_delete_crypto_key_sync.py" + }, { "canonical": true, "clientMethod": { @@ -5771,7 +6093,168 @@ "parameters": [ { "name": "request", - "type": "google.cloud.kms_v1.types.GetCryptoKeyRequest" + "type": "google.cloud.kms_v1.types.GetCryptoKeyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.kms_v1.types.CryptoKey", + "shortName": "get_crypto_key" + }, + "description": "Sample for GetCryptoKey", + "file": "cloudkms_v1_generated_key_management_service_get_crypto_key_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudkms_v1_generated_KeyManagementService_GetCryptoKey_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudkms_v1_generated_key_management_service_get_crypto_key_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.kms_v1.KeyManagementServiceAsyncClient", + "shortName": "KeyManagementServiceAsyncClient" + }, + "fullName": "google.cloud.kms_v1.KeyManagementServiceAsyncClient.get_import_job", + "method": { + "fullName": "google.cloud.kms.v1.KeyManagementService.GetImportJob", + "service": { + "fullName": 
"google.cloud.kms.v1.KeyManagementService", + "shortName": "KeyManagementService" + }, + "shortName": "GetImportJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.kms_v1.types.GetImportJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.kms_v1.types.ImportJob", + "shortName": "get_import_job" + }, + "description": "Sample for GetImportJob", + "file": "cloudkms_v1_generated_key_management_service_get_import_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudkms_v1_generated_KeyManagementService_GetImportJob_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudkms_v1_generated_key_management_service_get_import_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.kms_v1.KeyManagementServiceClient", + "shortName": "KeyManagementServiceClient" + }, + "fullName": "google.cloud.kms_v1.KeyManagementServiceClient.get_import_job", + "method": { + "fullName": "google.cloud.kms.v1.KeyManagementService.GetImportJob", + "service": { + "fullName": "google.cloud.kms.v1.KeyManagementService", + "shortName": "KeyManagementService" + }, + "shortName": "GetImportJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.kms_v1.types.GetImportJobRequest" }, { "name": "name", @@ -5790,14 +6273,14 @@ "type": "Sequence[Tuple[str, Union[str, 
bytes]]]" } ], - "resultType": "google.cloud.kms_v1.types.CryptoKey", - "shortName": "get_crypto_key" + "resultType": "google.cloud.kms_v1.types.ImportJob", + "shortName": "get_import_job" }, - "description": "Sample for GetCryptoKey", - "file": "cloudkms_v1_generated_key_management_service_get_crypto_key_sync.py", + "description": "Sample for GetImportJob", + "file": "cloudkms_v1_generated_key_management_service_get_import_job_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudkms_v1_generated_KeyManagementService_GetCryptoKey_sync", + "regionTag": "cloudkms_v1_generated_KeyManagementService_GetImportJob_sync", "segments": [ { "end": 51, @@ -5830,7 +6313,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "cloudkms_v1_generated_key_management_service_get_crypto_key_sync.py" + "title": "cloudkms_v1_generated_key_management_service_get_import_job_sync.py" }, { "canonical": true, @@ -5840,19 +6323,19 @@ "fullName": "google.cloud.kms_v1.KeyManagementServiceAsyncClient", "shortName": "KeyManagementServiceAsyncClient" }, - "fullName": "google.cloud.kms_v1.KeyManagementServiceAsyncClient.get_import_job", + "fullName": "google.cloud.kms_v1.KeyManagementServiceAsyncClient.get_key_ring", "method": { - "fullName": "google.cloud.kms.v1.KeyManagementService.GetImportJob", + "fullName": "google.cloud.kms.v1.KeyManagementService.GetKeyRing", "service": { "fullName": "google.cloud.kms.v1.KeyManagementService", "shortName": "KeyManagementService" }, - "shortName": "GetImportJob" + "shortName": "GetKeyRing" }, "parameters": [ { "name": "request", - "type": "google.cloud.kms_v1.types.GetImportJobRequest" + "type": "google.cloud.kms_v1.types.GetKeyRingRequest" }, { "name": "name", @@ -5871,14 +6354,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.kms_v1.types.ImportJob", - "shortName": "get_import_job" + "resultType": "google.cloud.kms_v1.types.KeyRing", + "shortName": "get_key_ring" }, - "description": "Sample for 
GetImportJob", - "file": "cloudkms_v1_generated_key_management_service_get_import_job_async.py", + "description": "Sample for GetKeyRing", + "file": "cloudkms_v1_generated_key_management_service_get_key_ring_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudkms_v1_generated_KeyManagementService_GetImportJob_async", + "regionTag": "cloudkms_v1_generated_KeyManagementService_GetKeyRing_async", "segments": [ { "end": 51, @@ -5911,7 +6394,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "cloudkms_v1_generated_key_management_service_get_import_job_async.py" + "title": "cloudkms_v1_generated_key_management_service_get_key_ring_async.py" }, { "canonical": true, @@ -5920,19 +6403,19 @@ "fullName": "google.cloud.kms_v1.KeyManagementServiceClient", "shortName": "KeyManagementServiceClient" }, - "fullName": "google.cloud.kms_v1.KeyManagementServiceClient.get_import_job", + "fullName": "google.cloud.kms_v1.KeyManagementServiceClient.get_key_ring", "method": { - "fullName": "google.cloud.kms.v1.KeyManagementService.GetImportJob", + "fullName": "google.cloud.kms.v1.KeyManagementService.GetKeyRing", "service": { "fullName": "google.cloud.kms.v1.KeyManagementService", "shortName": "KeyManagementService" }, - "shortName": "GetImportJob" + "shortName": "GetKeyRing" }, "parameters": [ { "name": "request", - "type": "google.cloud.kms_v1.types.GetImportJobRequest" + "type": "google.cloud.kms_v1.types.GetKeyRingRequest" }, { "name": "name", @@ -5951,14 +6434,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.kms_v1.types.ImportJob", - "shortName": "get_import_job" + "resultType": "google.cloud.kms_v1.types.KeyRing", + "shortName": "get_key_ring" }, - "description": "Sample for GetImportJob", - "file": "cloudkms_v1_generated_key_management_service_get_import_job_sync.py", + "description": "Sample for GetKeyRing", + "file": "cloudkms_v1_generated_key_management_service_get_key_ring_sync.py", "language": "PYTHON", 
"origin": "API_DEFINITION", - "regionTag": "cloudkms_v1_generated_KeyManagementService_GetImportJob_sync", + "regionTag": "cloudkms_v1_generated_KeyManagementService_GetKeyRing_sync", "segments": [ { "end": 51, @@ -5991,7 +6474,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "cloudkms_v1_generated_key_management_service_get_import_job_sync.py" + "title": "cloudkms_v1_generated_key_management_service_get_key_ring_sync.py" }, { "canonical": true, @@ -6001,19 +6484,19 @@ "fullName": "google.cloud.kms_v1.KeyManagementServiceAsyncClient", "shortName": "KeyManagementServiceAsyncClient" }, - "fullName": "google.cloud.kms_v1.KeyManagementServiceAsyncClient.get_key_ring", + "fullName": "google.cloud.kms_v1.KeyManagementServiceAsyncClient.get_public_key", "method": { - "fullName": "google.cloud.kms.v1.KeyManagementService.GetKeyRing", + "fullName": "google.cloud.kms.v1.KeyManagementService.GetPublicKey", "service": { "fullName": "google.cloud.kms.v1.KeyManagementService", "shortName": "KeyManagementService" }, - "shortName": "GetKeyRing" + "shortName": "GetPublicKey" }, "parameters": [ { "name": "request", - "type": "google.cloud.kms_v1.types.GetKeyRingRequest" + "type": "google.cloud.kms_v1.types.GetPublicKeyRequest" }, { "name": "name", @@ -6032,14 +6515,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.kms_v1.types.KeyRing", - "shortName": "get_key_ring" + "resultType": "google.cloud.kms_v1.types.PublicKey", + "shortName": "get_public_key" }, - "description": "Sample for GetKeyRing", - "file": "cloudkms_v1_generated_key_management_service_get_key_ring_async.py", + "description": "Sample for GetPublicKey", + "file": "cloudkms_v1_generated_key_management_service_get_public_key_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudkms_v1_generated_KeyManagementService_GetKeyRing_async", + "regionTag": "cloudkms_v1_generated_KeyManagementService_GetPublicKey_async", "segments": [ { "end": 51, @@ -6072,7 
+6555,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "cloudkms_v1_generated_key_management_service_get_key_ring_async.py" + "title": "cloudkms_v1_generated_key_management_service_get_public_key_async.py" }, { "canonical": true, @@ -6081,19 +6564,19 @@ "fullName": "google.cloud.kms_v1.KeyManagementServiceClient", "shortName": "KeyManagementServiceClient" }, - "fullName": "google.cloud.kms_v1.KeyManagementServiceClient.get_key_ring", + "fullName": "google.cloud.kms_v1.KeyManagementServiceClient.get_public_key", "method": { - "fullName": "google.cloud.kms.v1.KeyManagementService.GetKeyRing", + "fullName": "google.cloud.kms.v1.KeyManagementService.GetPublicKey", "service": { "fullName": "google.cloud.kms.v1.KeyManagementService", "shortName": "KeyManagementService" }, - "shortName": "GetKeyRing" + "shortName": "GetPublicKey" }, "parameters": [ { "name": "request", - "type": "google.cloud.kms_v1.types.GetKeyRingRequest" + "type": "google.cloud.kms_v1.types.GetPublicKeyRequest" }, { "name": "name", @@ -6112,14 +6595,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.kms_v1.types.KeyRing", - "shortName": "get_key_ring" + "resultType": "google.cloud.kms_v1.types.PublicKey", + "shortName": "get_public_key" }, - "description": "Sample for GetKeyRing", - "file": "cloudkms_v1_generated_key_management_service_get_key_ring_sync.py", + "description": "Sample for GetPublicKey", + "file": "cloudkms_v1_generated_key_management_service_get_public_key_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudkms_v1_generated_KeyManagementService_GetKeyRing_sync", + "regionTag": "cloudkms_v1_generated_KeyManagementService_GetPublicKey_sync", "segments": [ { "end": 51, @@ -6152,7 +6635,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "cloudkms_v1_generated_key_management_service_get_key_ring_sync.py" + "title": "cloudkms_v1_generated_key_management_service_get_public_key_sync.py" }, { "canonical": true, @@ -6162,19 
+6645,19 @@ "fullName": "google.cloud.kms_v1.KeyManagementServiceAsyncClient", "shortName": "KeyManagementServiceAsyncClient" }, - "fullName": "google.cloud.kms_v1.KeyManagementServiceAsyncClient.get_public_key", + "fullName": "google.cloud.kms_v1.KeyManagementServiceAsyncClient.get_retired_resource", "method": { - "fullName": "google.cloud.kms.v1.KeyManagementService.GetPublicKey", + "fullName": "google.cloud.kms.v1.KeyManagementService.GetRetiredResource", "service": { "fullName": "google.cloud.kms.v1.KeyManagementService", "shortName": "KeyManagementService" }, - "shortName": "GetPublicKey" + "shortName": "GetRetiredResource" }, "parameters": [ { "name": "request", - "type": "google.cloud.kms_v1.types.GetPublicKeyRequest" + "type": "google.cloud.kms_v1.types.GetRetiredResourceRequest" }, { "name": "name", @@ -6193,14 +6676,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.kms_v1.types.PublicKey", - "shortName": "get_public_key" + "resultType": "google.cloud.kms_v1.types.RetiredResource", + "shortName": "get_retired_resource" }, - "description": "Sample for GetPublicKey", - "file": "cloudkms_v1_generated_key_management_service_get_public_key_async.py", + "description": "Sample for GetRetiredResource", + "file": "cloudkms_v1_generated_key_management_service_get_retired_resource_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudkms_v1_generated_KeyManagementService_GetPublicKey_async", + "regionTag": "cloudkms_v1_generated_KeyManagementService_GetRetiredResource_async", "segments": [ { "end": 51, @@ -6233,7 +6716,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "cloudkms_v1_generated_key_management_service_get_public_key_async.py" + "title": "cloudkms_v1_generated_key_management_service_get_retired_resource_async.py" }, { "canonical": true, @@ -6242,19 +6725,19 @@ "fullName": "google.cloud.kms_v1.KeyManagementServiceClient", "shortName": "KeyManagementServiceClient" }, - "fullName": 
"google.cloud.kms_v1.KeyManagementServiceClient.get_public_key", + "fullName": "google.cloud.kms_v1.KeyManagementServiceClient.get_retired_resource", "method": { - "fullName": "google.cloud.kms.v1.KeyManagementService.GetPublicKey", + "fullName": "google.cloud.kms.v1.KeyManagementService.GetRetiredResource", "service": { "fullName": "google.cloud.kms.v1.KeyManagementService", "shortName": "KeyManagementService" }, - "shortName": "GetPublicKey" + "shortName": "GetRetiredResource" }, "parameters": [ { "name": "request", - "type": "google.cloud.kms_v1.types.GetPublicKeyRequest" + "type": "google.cloud.kms_v1.types.GetRetiredResourceRequest" }, { "name": "name", @@ -6273,14 +6756,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.kms_v1.types.PublicKey", - "shortName": "get_public_key" + "resultType": "google.cloud.kms_v1.types.RetiredResource", + "shortName": "get_retired_resource" }, - "description": "Sample for GetPublicKey", - "file": "cloudkms_v1_generated_key_management_service_get_public_key_sync.py", + "description": "Sample for GetRetiredResource", + "file": "cloudkms_v1_generated_key_management_service_get_retired_resource_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudkms_v1_generated_KeyManagementService_GetPublicKey_sync", + "regionTag": "cloudkms_v1_generated_KeyManagementService_GetRetiredResource_sync", "segments": [ { "end": 51, @@ -6313,7 +6796,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "cloudkms_v1_generated_key_management_service_get_public_key_sync.py" + "title": "cloudkms_v1_generated_key_management_service_get_retired_resource_sync.py" }, { "canonical": true, @@ -7112,6 +7595,167 @@ ], "title": "cloudkms_v1_generated_key_management_service_list_key_rings_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.kms_v1.KeyManagementServiceAsyncClient", + "shortName": "KeyManagementServiceAsyncClient" + }, + 
"fullName": "google.cloud.kms_v1.KeyManagementServiceAsyncClient.list_retired_resources", + "method": { + "fullName": "google.cloud.kms.v1.KeyManagementService.ListRetiredResources", + "service": { + "fullName": "google.cloud.kms.v1.KeyManagementService", + "shortName": "KeyManagementService" + }, + "shortName": "ListRetiredResources" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.kms_v1.types.ListRetiredResourcesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.kms_v1.services.key_management_service.pagers.ListRetiredResourcesAsyncPager", + "shortName": "list_retired_resources" + }, + "description": "Sample for ListRetiredResources", + "file": "cloudkms_v1_generated_key_management_service_list_retired_resources_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudkms_v1_generated_KeyManagementService_ListRetiredResources_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudkms_v1_generated_key_management_service_list_retired_resources_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.kms_v1.KeyManagementServiceClient", + "shortName": "KeyManagementServiceClient" + }, + "fullName": "google.cloud.kms_v1.KeyManagementServiceClient.list_retired_resources", + "method": { + "fullName": "google.cloud.kms.v1.KeyManagementService.ListRetiredResources", + 
"service": { + "fullName": "google.cloud.kms.v1.KeyManagementService", + "shortName": "KeyManagementService" + }, + "shortName": "ListRetiredResources" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.kms_v1.types.ListRetiredResourcesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.kms_v1.services.key_management_service.pagers.ListRetiredResourcesPager", + "shortName": "list_retired_resources" + }, + "description": "Sample for ListRetiredResources", + "file": "cloudkms_v1_generated_key_management_service_list_retired_resources_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudkms_v1_generated_KeyManagementService_ListRetiredResources_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudkms_v1_generated_key_management_service_list_retired_resources_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey_admin.py b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey_admin.py index bbd8980dcbf6..bdd409ddc3c7 100644 --- a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey_admin.py +++ b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey_admin.py @@ -1318,6 +1318,7 @@ def test_update_autokey_config(request_type, transport: str = "grpc"): key_project="key_project_value", 
state=autokey_admin.AutokeyConfig.State.ACTIVE, etag="etag_value", + key_project_resolution_mode=autokey_admin.AutokeyConfig.KeyProjectResolutionMode.DEDICATED_KEY_PROJECT, ) response = client.update_autokey_config(request) @@ -1333,6 +1334,10 @@ def test_update_autokey_config(request_type, transport: str = "grpc"): assert response.key_project == "key_project_value" assert response.state == autokey_admin.AutokeyConfig.State.ACTIVE assert response.etag == "etag_value" + assert ( + response.key_project_resolution_mode + == autokey_admin.AutokeyConfig.KeyProjectResolutionMode.DEDICATED_KEY_PROJECT + ) def test_update_autokey_config_non_empty_request_with_auto_populated_field(): @@ -1468,6 +1473,7 @@ async def test_update_autokey_config_async( key_project="key_project_value", state=autokey_admin.AutokeyConfig.State.ACTIVE, etag="etag_value", + key_project_resolution_mode=autokey_admin.AutokeyConfig.KeyProjectResolutionMode.DEDICATED_KEY_PROJECT, ) ) response = await client.update_autokey_config(request) @@ -1484,6 +1490,10 @@ async def test_update_autokey_config_async( assert response.key_project == "key_project_value" assert response.state == autokey_admin.AutokeyConfig.State.ACTIVE assert response.etag == "etag_value" + assert ( + response.key_project_resolution_mode + == autokey_admin.AutokeyConfig.KeyProjectResolutionMode.DEDICATED_KEY_PROJECT + ) @pytest.mark.asyncio @@ -1679,6 +1689,7 @@ def test_get_autokey_config(request_type, transport: str = "grpc"): key_project="key_project_value", state=autokey_admin.AutokeyConfig.State.ACTIVE, etag="etag_value", + key_project_resolution_mode=autokey_admin.AutokeyConfig.KeyProjectResolutionMode.DEDICATED_KEY_PROJECT, ) response = client.get_autokey_config(request) @@ -1694,6 +1705,10 @@ def test_get_autokey_config(request_type, transport: str = "grpc"): assert response.key_project == "key_project_value" assert response.state == autokey_admin.AutokeyConfig.State.ACTIVE assert response.etag == "etag_value" + assert ( + 
response.key_project_resolution_mode + == autokey_admin.AutokeyConfig.KeyProjectResolutionMode.DEDICATED_KEY_PROJECT + ) def test_get_autokey_config_non_empty_request_with_auto_populated_field(): @@ -1831,6 +1846,7 @@ async def test_get_autokey_config_async( key_project="key_project_value", state=autokey_admin.AutokeyConfig.State.ACTIVE, etag="etag_value", + key_project_resolution_mode=autokey_admin.AutokeyConfig.KeyProjectResolutionMode.DEDICATED_KEY_PROJECT, ) ) response = await client.get_autokey_config(request) @@ -1847,6 +1863,10 @@ async def test_get_autokey_config_async( assert response.key_project == "key_project_value" assert response.state == autokey_admin.AutokeyConfig.State.ACTIVE assert response.etag == "etag_value" + assert ( + response.key_project_resolution_mode + == autokey_admin.AutokeyConfig.KeyProjectResolutionMode.DEDICATED_KEY_PROJECT + ) @pytest.mark.asyncio @@ -3115,6 +3135,7 @@ async def test_update_autokey_config_empty_call_grpc_asyncio(): key_project="key_project_value", state=autokey_admin.AutokeyConfig.State.ACTIVE, etag="etag_value", + key_project_resolution_mode=autokey_admin.AutokeyConfig.KeyProjectResolutionMode.DEDICATED_KEY_PROJECT, ) ) await client.update_autokey_config(request=None) @@ -3147,6 +3168,7 @@ async def test_get_autokey_config_empty_call_grpc_asyncio(): key_project="key_project_value", state=autokey_admin.AutokeyConfig.State.ACTIVE, etag="etag_value", + key_project_resolution_mode=autokey_admin.AutokeyConfig.KeyProjectResolutionMode.DEDICATED_KEY_PROJECT, ) ) await client.get_autokey_config(request=None) @@ -3239,6 +3261,7 @@ def test_update_autokey_config_rest_call_success(request_type): "key_project": "key_project_value", "state": 1, "etag": "etag_value", + "key_project_resolution_mode": 1, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -3317,6 +3340,7 @@ def get_message_fields(field): key_project="key_project_value", state=autokey_admin.AutokeyConfig.State.ACTIVE, etag="etag_value", + key_project_resolution_mode=autokey_admin.AutokeyConfig.KeyProjectResolutionMode.DEDICATED_KEY_PROJECT, ) # Wrap the value into a proper Response obj @@ -3337,6 +3361,10 @@ def get_message_fields(field): assert response.key_project == "key_project_value" assert response.state == autokey_admin.AutokeyConfig.State.ACTIVE assert response.etag == "etag_value" + assert ( + response.key_project_resolution_mode + == autokey_admin.AutokeyConfig.KeyProjectResolutionMode.DEDICATED_KEY_PROJECT + ) @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -3453,6 +3481,7 @@ def test_get_autokey_config_rest_call_success(request_type): key_project="key_project_value", state=autokey_admin.AutokeyConfig.State.ACTIVE, etag="etag_value", + key_project_resolution_mode=autokey_admin.AutokeyConfig.KeyProjectResolutionMode.DEDICATED_KEY_PROJECT, ) # Wrap the value into a proper Response obj @@ -3473,6 +3502,10 @@ def test_get_autokey_config_rest_call_success(request_type): assert response.key_project == "key_project_value" assert response.state == autokey_admin.AutokeyConfig.State.ACTIVE assert response.etag == "etag_value" + assert ( + response.key_project_resolution_mode + == autokey_admin.AutokeyConfig.KeyProjectResolutionMode.DEDICATED_KEY_PROJECT + ) @pytest.mark.parametrize("null_interceptor", [True, False]) diff --git a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_key_management_service.py b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_key_management_service.py index ab83573a55ce..5084a320f50c 100644 --- a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_key_management_service.py +++ b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_key_management_service.py @@ -43,10 +43,19 @@ except ImportError: # pragma: NO COVER 
HAS_GOOGLE_AUTH_AIO = False -from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) from google.api_core import client_options from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries +import google.api_core.operation_async as operation_async # type: ignore import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError @@ -57,6 +66,7 @@ from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account import google.protobuf.duration_pb2 as duration_pb2 # type: ignore +import google.protobuf.empty_pb2 as empty_pb2 # type: ignore import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore import google.protobuf.wrappers_pb2 as wrappers_pb2 # type: ignore @@ -3522,11 +3532,11 @@ async def test_list_import_jobs_async_pages(): @pytest.mark.parametrize( "request_type", [ - service.GetKeyRingRequest, + service.ListRetiredResourcesRequest, dict, ], ) -def test_get_key_ring(request_type, transport: str = "grpc"): +def test_list_retired_resources(request_type, transport: str = "grpc"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3537,25 +3547,29 @@ def test_get_key_ring(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_key_ring), "__call__") as call: + with mock.patch.object( + type(client.transport.list_retired_resources), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.KeyRing( - name="name_value", + call.return_value = service.ListRetiredResourcesResponse( + next_page_token="next_page_token_value", + total_size=1086, ) - response = client.get_key_ring(request) + response = client.list_retired_resources(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.GetKeyRingRequest() + request = service.ListRetiredResourcesRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.KeyRing) - assert response.name == "name_value" + assert isinstance(response, pagers.ListRetiredResourcesPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 -def test_get_key_ring_non_empty_request_with_auto_populated_field(): +def test_list_retired_resources_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = KeyManagementServiceClient( @@ -3566,24 +3580,28 @@ def test_get_key_ring_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.GetKeyRingRequest( - name="name_value", + request = service.ListRetiredResourcesRequest( + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_key_ring), "__call__") as call: + with mock.patch.object( + type(client.transport.list_retired_resources), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_key_ring(request=request) + client.list_retired_resources(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.GetKeyRingRequest( - name="name_value", + assert args[0] == service.ListRetiredResourcesRequest( + parent="parent_value", + page_token="page_token_value", ) -def test_get_key_ring_use_cached_wrapped_rpc(): +def test_list_retired_resources_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3597,21 +3615,26 @@ def test_get_key_ring_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_key_ring in client._transport._wrapped_methods + assert ( + client._transport.list_retired_resources + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_key_ring] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_retired_resources + ] = mock_rpc request = {} - client.get_key_ring(request) + client.list_retired_resources(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_key_ring(request) + client.list_retired_resources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3619,7 +3642,7 @@ def test_get_key_ring_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_key_ring_async_use_cached_wrapped_rpc( +async def test_list_retired_resources_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3636,7 +3659,7 @@ async def test_get_key_ring_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_key_ring + client._client._transport.list_retired_resources in client._client._transport._wrapped_methods ) @@ -3644,16 +3667,16 @@ async def test_get_key_ring_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_key_ring + client._client._transport.list_retired_resources ] = mock_rpc request = {} - await client.get_key_ring(request) + await client.list_retired_resources(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.get_key_ring(request) + await client.list_retired_resources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3661,8 +3684,8 @@ async def test_get_key_ring_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_key_ring_async( - transport: str = "grpc_asyncio", request_type=service.GetKeyRingRequest +async def test_list_retired_resources_async( + transport: str = "grpc_asyncio", request_type=service.ListRetiredResourcesRequest ): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -3674,46 +3697,52 @@ async def test_get_key_ring_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_key_ring), "__call__") as call: + with mock.patch.object( + type(client.transport.list_retired_resources), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.KeyRing( - name="name_value", + service.ListRetiredResourcesResponse( + next_page_token="next_page_token_value", + total_size=1086, ) ) - response = await client.get_key_ring(request) + response = await client.list_retired_resources(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.GetKeyRingRequest() + request = service.ListRetiredResourcesRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.KeyRing) - assert response.name == "name_value" + assert isinstance(response, pagers.ListRetiredResourcesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 @pytest.mark.asyncio -async def test_get_key_ring_async_from_dict(): - await test_get_key_ring_async(request_type=dict) +async def test_list_retired_resources_async_from_dict(): + await test_list_retired_resources_async(request_type=dict) -def test_get_key_ring_field_headers(): +def test_list_retired_resources_field_headers(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetKeyRingRequest() + request = service.ListRetiredResourcesRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_key_ring), "__call__") as call: - call.return_value = resources.KeyRing() - client.get_key_ring(request) + with mock.patch.object( + type(client.transport.list_retired_resources), "__call__" + ) as call: + call.return_value = service.ListRetiredResourcesResponse() + client.list_retired_resources(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3724,26 +3753,30 @@ def test_get_key_ring_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_key_ring_field_headers_async(): +async def test_list_retired_resources_field_headers_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = service.GetKeyRingRequest() + request = service.ListRetiredResourcesRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_key_ring), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.KeyRing()) - await client.get_key_ring(request) + with mock.patch.object( + type(client.transport.list_retired_resources), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListRetiredResourcesResponse() + ) + await client.list_retired_resources(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -3754,35 +3787,37 @@ async def test_get_key_ring_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_key_ring_flattened(): +def test_list_retired_resources_flattened(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_key_ring), "__call__") as call: + with mock.patch.object( + type(client.transport.list_retired_resources), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.KeyRing() + call.return_value = service.ListRetiredResourcesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_key_ring( - name="name_value", + client.list_retired_resources( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_get_key_ring_flattened_error(): +def test_list_retired_resources_flattened_error(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3790,41 +3825,45 @@ def test_get_key_ring_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_key_ring( - service.GetKeyRingRequest(), - name="name_value", + client.list_retired_resources( + service.ListRetiredResourcesRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_get_key_ring_flattened_async(): +async def test_list_retired_resources_flattened_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_key_ring), "__call__") as call: + with mock.patch.object( + type(client.transport.list_retired_resources), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.KeyRing() + call.return_value = service.ListRetiredResourcesResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.KeyRing()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListRetiredResourcesResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_key_ring( - name="name_value", + response = await client.list_retired_resources( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_key_ring_flattened_error_async(): +async def test_list_retired_resources_flattened_error_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) @@ -3832,111 +3871,307 @@ async def test_get_key_ring_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_key_ring( - service.GetKeyRingRequest(), - name="name_value", + await client.list_retired_resources( + service.ListRetiredResourcesRequest(), + parent="parent_value", ) -@pytest.mark.parametrize( - "request_type", - [ - service.GetCryptoKeyRequest, - dict, - ], -) -def test_get_crypto_key(request_type, transport: str = "grpc"): +def test_list_retired_resources_pager(transport_name: str = "grpc"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport_name, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_crypto_key), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = resources.CryptoKey( - name="name_value", - purpose=resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT, - import_only=True, - crypto_key_backend="crypto_key_backend_value", + with mock.patch.object( + type(client.transport.list_retired_resources), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListRetiredResourcesResponse( + retired_resources=[ + resources.RetiredResource(), + resources.RetiredResource(), + resources.RetiredResource(), + ], + next_page_token="abc", + ), + service.ListRetiredResourcesResponse( + retired_resources=[], + next_page_token="def", + ), + service.ListRetiredResourcesResponse( + retired_resources=[ + resources.RetiredResource(), + ], + next_page_token="ghi", + ), + service.ListRetiredResourcesResponse( + retired_resources=[ + resources.RetiredResource(), + resources.RetiredResource(), + ], + ), + RuntimeError, ) - response = client.get_crypto_key(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.GetCryptoKeyRequest() - assert args[0] == request + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_retired_resources(request={}, retry=retry, timeout=timeout) - # Establish that the response is the type that we expect. - assert isinstance(response, resources.CryptoKey) - assert response.name == "name_value" - assert response.purpose == resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT - assert response.import_only is True - assert response.crypto_key_backend == "crypto_key_backend_value" + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.RetiredResource) for i in results) -def test_get_crypto_key_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
+ +def test_list_retired_resources_pages(transport_name: str = "grpc"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport_name, ) - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.GetCryptoKeyRequest( - name="name_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_retired_resources), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListRetiredResourcesResponse( + retired_resources=[ + resources.RetiredResource(), + resources.RetiredResource(), + resources.RetiredResource(), + ], + next_page_token="abc", + ), + service.ListRetiredResourcesResponse( + retired_resources=[], + next_page_token="def", + ), + service.ListRetiredResourcesResponse( + retired_resources=[ + resources.RetiredResource(), + ], + next_page_token="ghi", + ), + service.ListRetiredResourcesResponse( + retired_resources=[ + resources.RetiredResource(), + resources.RetiredResource(), + ], + ), + RuntimeError, + ) + pages = list(client.list_retired_resources(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_retired_resources_async_pager(): + client = KeyManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_crypto_key), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
+ with mock.patch.object( + type(client.transport.list_retired_resources), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListRetiredResourcesResponse( + retired_resources=[ + resources.RetiredResource(), + resources.RetiredResource(), + resources.RetiredResource(), + ], + next_page_token="abc", + ), + service.ListRetiredResourcesResponse( + retired_resources=[], + next_page_token="def", + ), + service.ListRetiredResourcesResponse( + retired_resources=[ + resources.RetiredResource(), + ], + next_page_token="ghi", + ), + service.ListRetiredResourcesResponse( + retired_resources=[ + resources.RetiredResource(), + resources.RetiredResource(), + ], + ), + RuntimeError, ) - client.get_crypto_key(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetCryptoKeyRequest( - name="name_value", + async_pager = await client.list_retired_resources( + request={}, ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + assert len(responses) == 6 + assert all(isinstance(i, resources.RetiredResource) for i in responses) -def test_get_crypto_key_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = KeyManagementServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - # Should wrap all calls on client creation +@pytest.mark.asyncio +async def test_list_retired_resources_async_pages(): + client = KeyManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_retired_resources), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListRetiredResourcesResponse( + retired_resources=[ + resources.RetiredResource(), + resources.RetiredResource(), + resources.RetiredResource(), + ], + next_page_token="abc", + ), + service.ListRetiredResourcesResponse( + retired_resources=[], + next_page_token="def", + ), + service.ListRetiredResourcesResponse( + retired_resources=[ + resources.RetiredResource(), + ], + next_page_token="ghi", + ), + service.ListRetiredResourcesResponse( + retired_resources=[ + resources.RetiredResource(), + resources.RetiredResource(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_retired_resources(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetKeyRingRequest, + dict, + ], +) +def test_get_key_ring(request_type, transport: str = "grpc"): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_key_ring), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = resources.KeyRing( + name="name_value", + ) + response = client.get_key_ring(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.GetKeyRingRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.KeyRing) + assert response.name == "name_value" + + +def test_get_key_ring_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.GetKeyRingRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_key_ring), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_key_ring(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetKeyRingRequest( + name="name_value", + ) + + +def test_get_key_ring_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation assert wrapper_fn.call_count > 0 wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_crypto_key in client._transport._wrapped_methods + assert client._transport.get_key_ring in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_crypto_key] = mock_rpc + client._transport._wrapped_methods[client._transport.get_key_ring] = mock_rpc request = {} - client.get_crypto_key(request) + client.get_key_ring(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_crypto_key(request) + client.get_key_ring(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3944,7 +4179,7 @@ def test_get_crypto_key_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_crypto_key_async_use_cached_wrapped_rpc( +async def test_get_key_ring_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3961,7 +4196,7 @@ async def test_get_crypto_key_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_crypto_key + client._client._transport.get_key_ring in client._client._transport._wrapped_methods ) @@ -3969,16 +4204,16 @@ async def test_get_crypto_key_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_crypto_key + client._client._transport.get_key_ring ] = mock_rpc request = {} - await client.get_crypto_key(request) + await client.get_key_ring(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_crypto_key(request) + await client.get_key_ring(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3986,8 +4221,8 @@ async def test_get_crypto_key_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_crypto_key_async( - transport: str = "grpc_asyncio", request_type=service.GetCryptoKeyRequest +async def test_get_key_ring_async( + transport: str = "grpc_asyncio", request_type=service.GetKeyRingRequest ): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -3999,52 +4234,46 @@ async def test_get_crypto_key_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_crypto_key), "__call__") as call: + with mock.patch.object(type(client.transport.get_key_ring), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CryptoKey( + resources.KeyRing( name="name_value", - purpose=resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT, - import_only=True, - crypto_key_backend="crypto_key_backend_value", ) ) - response = await client.get_crypto_key(request) + response = await client.get_key_ring(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.GetCryptoKeyRequest() + request = service.GetKeyRingRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.CryptoKey) + assert isinstance(response, resources.KeyRing) assert response.name == "name_value" - assert response.purpose == resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT - assert response.import_only is True - assert response.crypto_key_backend == "crypto_key_backend_value" @pytest.mark.asyncio -async def test_get_crypto_key_async_from_dict(): - await test_get_crypto_key_async(request_type=dict) +async def test_get_key_ring_async_from_dict(): + await test_get_key_ring_async(request_type=dict) -def test_get_crypto_key_field_headers(): +def test_get_key_ring_field_headers(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetCryptoKeyRequest() + request = service.GetKeyRingRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_crypto_key), "__call__") as call: - call.return_value = resources.CryptoKey() - client.get_crypto_key(request) + with mock.patch.object(type(client.transport.get_key_ring), "__call__") as call: + call.return_value = resources.KeyRing() + client.get_key_ring(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -4060,21 +4289,21 @@ def test_get_crypto_key_field_headers(): @pytest.mark.asyncio -async def test_get_crypto_key_field_headers_async(): +async def test_get_key_ring_field_headers_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetCryptoKeyRequest() + request = service.GetKeyRingRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_crypto_key), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.CryptoKey()) - await client.get_crypto_key(request) + with mock.patch.object(type(client.transport.get_key_ring), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.KeyRing()) + await client.get_key_ring(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4089,18 +4318,18 @@ async def test_get_crypto_key_field_headers_async(): ) in kw["metadata"] -def test_get_crypto_key_flattened(): +def test_get_key_ring_flattened(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_crypto_key), "__call__") as call: + with mock.patch.object(type(client.transport.get_key_ring), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.CryptoKey() + call.return_value = resources.KeyRing() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_crypto_key( + client.get_key_ring( name="name_value", ) @@ -4113,7 +4342,7 @@ def test_get_crypto_key_flattened(): assert arg == mock_val -def test_get_crypto_key_flattened_error(): +def test_get_key_ring_flattened_error(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4121,27 +4350,27 @@ def test_get_crypto_key_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_crypto_key( - service.GetCryptoKeyRequest(), + client.get_key_ring( + service.GetKeyRingRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_crypto_key_flattened_async(): +async def test_get_key_ring_flattened_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_crypto_key), "__call__") as call: + with mock.patch.object(type(client.transport.get_key_ring), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.CryptoKey() + call.return_value = resources.KeyRing() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.CryptoKey()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.KeyRing()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_crypto_key( + response = await client.get_key_ring( name="name_value", ) @@ -4155,7 +4384,7 @@ async def test_get_crypto_key_flattened_async(): @pytest.mark.asyncio -async def test_get_crypto_key_flattened_error_async(): +async def test_get_key_ring_flattened_error_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) @@ -4163,8 +4392,8 @@ async def test_get_crypto_key_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_crypto_key( - service.GetCryptoKeyRequest(), + await client.get_key_ring( + service.GetKeyRingRequest(), name="name_value", ) @@ -4172,11 +4401,11 @@ async def test_get_crypto_key_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - service.GetCryptoKeyVersionRequest, + service.GetCryptoKeyRequest, dict, ], ) -def test_get_crypto_key_version(request_type, transport: str = "grpc"): +def test_get_crypto_key(request_type, transport: str = "grpc"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4187,52 +4416,31 @@ def test_get_crypto_key_version(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_crypto_key_version), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_crypto_key), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.CryptoKeyVersion( + call.return_value = resources.CryptoKey( name="name_value", - state=resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION, - protection_level=resources.ProtectionLevel.SOFTWARE, - algorithm=resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION, - import_job="import_job_value", - import_failure_reason="import_failure_reason_value", - generation_failure_reason="generation_failure_reason_value", - external_destruction_failure_reason="external_destruction_failure_reason_value", - reimport_eligible=True, + purpose=resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT, + import_only=True, + crypto_key_backend="crypto_key_backend_value", ) - response = client.get_crypto_key_version(request) + response = client.get_crypto_key(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.GetCryptoKeyVersionRequest() + request = service.GetCryptoKeyRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.CryptoKeyVersion) + assert isinstance(response, resources.CryptoKey) assert response.name == "name_value" - assert ( - response.state - == resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION - ) - assert response.protection_level == resources.ProtectionLevel.SOFTWARE - assert ( - response.algorithm - == resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION - ) - assert response.import_job == "import_job_value" - assert response.import_failure_reason == "import_failure_reason_value" - assert response.generation_failure_reason == "generation_failure_reason_value" - assert ( - response.external_destruction_failure_reason - == "external_destruction_failure_reason_value" - ) - assert response.reimport_eligible is True + assert response.purpose == resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT + assert response.import_only is True + assert response.crypto_key_backend == "crypto_key_backend_value" -def test_get_crypto_key_version_non_empty_request_with_auto_populated_field(): +def test_get_crypto_key_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = KeyManagementServiceClient( @@ -4243,26 +4451,24 @@ def test_get_crypto_key_version_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.GetCryptoKeyVersionRequest( + request = service.GetCryptoKeyRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_crypto_key_version), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_crypto_key), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_crypto_key_version(request=request) + client.get_crypto_key(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.GetCryptoKeyVersionRequest( + assert args[0] == service.GetCryptoKeyRequest( name="name_value", ) -def test_get_crypto_key_version_use_cached_wrapped_rpc(): +def test_get_crypto_key_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4276,26 +4482,21 @@ def test_get_crypto_key_version_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.get_crypto_key_version - in client._transport._wrapped_methods - ) + assert client._transport.get_crypto_key in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_crypto_key_version - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_crypto_key] = mock_rpc request = {} - client.get_crypto_key_version(request) + client.get_crypto_key(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_crypto_key_version(request) + client.get_crypto_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4303,7 +4504,7 @@ def test_get_crypto_key_version_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_crypto_key_version_async_use_cached_wrapped_rpc( +async def test_get_crypto_key_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4320,7 +4521,7 @@ async def test_get_crypto_key_version_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_crypto_key_version + client._client._transport.get_crypto_key in client._client._transport._wrapped_methods ) @@ -4328,16 +4529,16 @@ async def test_get_crypto_key_version_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_crypto_key_version + client._client._transport.get_crypto_key ] = mock_rpc request = {} - await client.get_crypto_key_version(request) + await client.get_crypto_key(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.get_crypto_key_version(request) + await client.get_crypto_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4345,8 +4546,8 @@ async def test_get_crypto_key_version_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_crypto_key_version_async( - transport: str = "grpc_asyncio", request_type=service.GetCryptoKeyVersionRequest +async def test_get_crypto_key_async( + transport: str = "grpc_asyncio", request_type=service.GetCryptoKeyRequest ): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -4358,75 +4559,52 @@ async def test_get_crypto_key_version_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_crypto_key_version), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_crypto_key), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CryptoKeyVersion( + resources.CryptoKey( name="name_value", - state=resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION, - protection_level=resources.ProtectionLevel.SOFTWARE, - algorithm=resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION, - import_job="import_job_value", - import_failure_reason="import_failure_reason_value", - generation_failure_reason="generation_failure_reason_value", - external_destruction_failure_reason="external_destruction_failure_reason_value", - reimport_eligible=True, + purpose=resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT, + import_only=True, + crypto_key_backend="crypto_key_backend_value", ) ) - response = await client.get_crypto_key_version(request) + response = await client.get_crypto_key(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.GetCryptoKeyVersionRequest() + request = service.GetCryptoKeyRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.CryptoKeyVersion) + assert isinstance(response, resources.CryptoKey) assert response.name == "name_value" - assert ( - response.state - == resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION - ) - assert response.protection_level == resources.ProtectionLevel.SOFTWARE - assert ( - response.algorithm - == resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION - ) - assert response.import_job == "import_job_value" - assert response.import_failure_reason == "import_failure_reason_value" - assert response.generation_failure_reason == "generation_failure_reason_value" - assert ( - response.external_destruction_failure_reason - == "external_destruction_failure_reason_value" - ) - assert response.reimport_eligible is True + assert response.purpose == resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT + assert response.import_only is True + assert response.crypto_key_backend == "crypto_key_backend_value" @pytest.mark.asyncio -async def test_get_crypto_key_version_async_from_dict(): - await test_get_crypto_key_version_async(request_type=dict) +async def test_get_crypto_key_async_from_dict(): + await test_get_crypto_key_async(request_type=dict) -def test_get_crypto_key_version_field_headers(): +def test_get_crypto_key_field_headers(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetCryptoKeyVersionRequest() + request = service.GetCryptoKeyRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_crypto_key_version), "__call__" - ) as call: - call.return_value = resources.CryptoKeyVersion() - client.get_crypto_key_version(request) + with mock.patch.object(type(client.transport.get_crypto_key), "__call__") as call: + call.return_value = resources.CryptoKey() + client.get_crypto_key(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -4442,25 +4620,21 @@ def test_get_crypto_key_version_field_headers(): @pytest.mark.asyncio -async def test_get_crypto_key_version_field_headers_async(): +async def test_get_crypto_key_field_headers_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetCryptoKeyVersionRequest() + request = service.GetCryptoKeyRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_crypto_key_version), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CryptoKeyVersion() - ) - await client.get_crypto_key_version(request) + with mock.patch.object(type(client.transport.get_crypto_key), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.CryptoKey()) + await client.get_crypto_key(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4475,20 +4649,18 @@ async def test_get_crypto_key_version_field_headers_async(): ) in kw["metadata"] -def test_get_crypto_key_version_flattened(): +def test_get_crypto_key_flattened(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_crypto_key_version), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_crypto_key), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.CryptoKeyVersion() + call.return_value = resources.CryptoKey() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_crypto_key_version( + client.get_crypto_key( name="name_value", ) @@ -4501,7 +4673,7 @@ def test_get_crypto_key_version_flattened(): assert arg == mock_val -def test_get_crypto_key_version_flattened_error(): +def test_get_crypto_key_flattened_error(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4509,31 +4681,27 @@ def test_get_crypto_key_version_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_crypto_key_version( - service.GetCryptoKeyVersionRequest(), + client.get_crypto_key( + service.GetCryptoKeyRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_crypto_key_version_flattened_async(): +async def test_get_crypto_key_flattened_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_crypto_key_version), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_crypto_key), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.CryptoKeyVersion() + call.return_value = resources.CryptoKey() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CryptoKeyVersion() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.CryptoKey()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_crypto_key_version( + response = await client.get_crypto_key( name="name_value", ) @@ -4547,7 +4715,7 @@ async def test_get_crypto_key_version_flattened_async(): @pytest.mark.asyncio -async def test_get_crypto_key_version_flattened_error_async(): +async def test_get_crypto_key_flattened_error_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) @@ -4555,8 +4723,8 @@ async def test_get_crypto_key_version_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_crypto_key_version( - service.GetCryptoKeyVersionRequest(), + await client.get_crypto_key( + service.GetCryptoKeyRequest(), name="name_value", ) @@ -4564,11 +4732,11 @@ async def test_get_crypto_key_version_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - service.GetPublicKeyRequest, + service.GetCryptoKeyVersionRequest, dict, ], ) -def test_get_public_key(request_type, transport: str = "grpc"): +def test_get_crypto_key_version(request_type, transport: str = "grpc"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4579,36 +4747,52 @@ def test_get_public_key(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_public_key), "__call__") as call: + with mock.patch.object( + type(client.transport.get_crypto_key_version), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.PublicKey( - pem="pem_value", - algorithm=resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION, + call.return_value = resources.CryptoKeyVersion( name="name_value", + state=resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION, protection_level=resources.ProtectionLevel.SOFTWARE, - public_key_format=resources.PublicKey.PublicKeyFormat.PEM, + algorithm=resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION, + import_job="import_job_value", + import_failure_reason="import_failure_reason_value", + generation_failure_reason="generation_failure_reason_value", + external_destruction_failure_reason="external_destruction_failure_reason_value", + reimport_eligible=True, ) - response = client.get_public_key(request) + response = client.get_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.GetPublicKeyRequest() + request = service.GetCryptoKeyVersionRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.PublicKey) - assert response.pem == "pem_value" + assert isinstance(response, resources.CryptoKeyVersion) + assert response.name == "name_value" + assert ( + response.state + == resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION + ) + assert response.protection_level == resources.ProtectionLevel.SOFTWARE assert ( response.algorithm == resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION ) - assert response.name == "name_value" - assert response.protection_level == resources.ProtectionLevel.SOFTWARE - assert response.public_key_format == resources.PublicKey.PublicKeyFormat.PEM + assert response.import_job == "import_job_value" + assert response.import_failure_reason == "import_failure_reason_value" + assert response.generation_failure_reason == "generation_failure_reason_value" + assert ( + response.external_destruction_failure_reason + == "external_destruction_failure_reason_value" + ) + assert response.reimport_eligible is True -def test_get_public_key_non_empty_request_with_auto_populated_field(): +def test_get_crypto_key_version_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = KeyManagementServiceClient( @@ -4619,24 +4803,26 @@ def test_get_public_key_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.GetPublicKeyRequest( + request = service.GetCryptoKeyVersionRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_public_key), "__call__") as call: + with mock.patch.object( + type(client.transport.get_crypto_key_version), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_public_key(request=request) + client.get_crypto_key_version(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.GetPublicKeyRequest( + assert args[0] == service.GetCryptoKeyVersionRequest( name="name_value", ) -def test_get_public_key_use_cached_wrapped_rpc(): +def test_get_crypto_key_version_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4650,21 +4836,26 @@ def test_get_public_key_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_public_key in client._transport._wrapped_methods + assert ( + client._transport.get_crypto_key_version + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_public_key] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_crypto_key_version + ] = mock_rpc request = {} - client.get_public_key(request) + client.get_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_public_key(request) + client.get_crypto_key_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4672,7 +4863,7 @@ def test_get_public_key_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_public_key_async_use_cached_wrapped_rpc( +async def test_get_crypto_key_version_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4689,7 +4880,7 @@ async def test_get_public_key_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_public_key + client._client._transport.get_crypto_key_version in client._client._transport._wrapped_methods ) @@ -4697,16 +4888,16 @@ async def test_get_public_key_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_public_key + client._client._transport.get_crypto_key_version ] = mock_rpc request = {} - await client.get_public_key(request) + await client.get_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.get_public_key(request) + await client.get_crypto_key_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4714,8 +4905,8 @@ async def test_get_public_key_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_public_key_async( - transport: str = "grpc_asyncio", request_type=service.GetPublicKeyRequest +async def test_get_crypto_key_version_async( + transport: str = "grpc_asyncio", request_type=service.GetCryptoKeyVersionRequest ): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -4727,57 +4918,75 @@ async def test_get_public_key_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_public_key), "__call__") as call: + with mock.patch.object( + type(client.transport.get_crypto_key_version), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.PublicKey( - pem="pem_value", - algorithm=resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION, + resources.CryptoKeyVersion( name="name_value", + state=resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION, protection_level=resources.ProtectionLevel.SOFTWARE, - public_key_format=resources.PublicKey.PublicKeyFormat.PEM, + algorithm=resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION, + import_job="import_job_value", + import_failure_reason="import_failure_reason_value", + generation_failure_reason="generation_failure_reason_value", + external_destruction_failure_reason="external_destruction_failure_reason_value", + reimport_eligible=True, ) ) - response = await client.get_public_key(request) + response = await client.get_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.GetPublicKeyRequest() + request = service.GetCryptoKeyVersionRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.PublicKey) - assert response.pem == "pem_value" + assert isinstance(response, resources.CryptoKeyVersion) + assert response.name == "name_value" + assert ( + response.state + == resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION + ) + assert response.protection_level == resources.ProtectionLevel.SOFTWARE assert ( response.algorithm == resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION ) - assert response.name == "name_value" - assert response.protection_level == resources.ProtectionLevel.SOFTWARE - assert response.public_key_format == resources.PublicKey.PublicKeyFormat.PEM + assert response.import_job == "import_job_value" + assert response.import_failure_reason == "import_failure_reason_value" + assert response.generation_failure_reason == "generation_failure_reason_value" + assert ( + response.external_destruction_failure_reason + == "external_destruction_failure_reason_value" + ) + assert response.reimport_eligible is True @pytest.mark.asyncio -async def test_get_public_key_async_from_dict(): - await test_get_public_key_async(request_type=dict) +async def test_get_crypto_key_version_async_from_dict(): + await test_get_crypto_key_version_async(request_type=dict) -def test_get_public_key_field_headers(): +def test_get_crypto_key_version_field_headers(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetPublicKeyRequest() + request = service.GetCryptoKeyVersionRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_public_key), "__call__") as call: - call.return_value = resources.PublicKey() - client.get_public_key(request) + with mock.patch.object( + type(client.transport.get_crypto_key_version), "__call__" + ) as call: + call.return_value = resources.CryptoKeyVersion() + client.get_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -4793,21 +5002,25 @@ def test_get_public_key_field_headers(): @pytest.mark.asyncio -async def test_get_public_key_field_headers_async(): +async def test_get_crypto_key_version_field_headers_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetPublicKeyRequest() + request = service.GetCryptoKeyVersionRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_public_key), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.PublicKey()) - await client.get_public_key(request) + with mock.patch.object( + type(client.transport.get_crypto_key_version), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.CryptoKeyVersion() + ) + await client.get_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4822,18 +5035,20 @@ async def test_get_public_key_field_headers_async(): ) in kw["metadata"] -def test_get_public_key_flattened(): +def test_get_crypto_key_version_flattened(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_public_key), "__call__") as call: + with mock.patch.object( + type(client.transport.get_crypto_key_version), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.PublicKey() + call.return_value = resources.CryptoKeyVersion() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_public_key( + client.get_crypto_key_version( name="name_value", ) @@ -4846,7 +5061,7 @@ def test_get_public_key_flattened(): assert arg == mock_val -def test_get_public_key_flattened_error(): +def test_get_crypto_key_version_flattened_error(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4854,27 +5069,31 @@ def test_get_public_key_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_public_key( - service.GetPublicKeyRequest(), + client.get_crypto_key_version( + service.GetCryptoKeyVersionRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_public_key_flattened_async(): +async def test_get_crypto_key_version_flattened_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_public_key), "__call__") as call: + with mock.patch.object( + type(client.transport.get_crypto_key_version), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.PublicKey() + call.return_value = resources.CryptoKeyVersion() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.PublicKey()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.CryptoKeyVersion() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_public_key( + response = await client.get_crypto_key_version( name="name_value", ) @@ -4888,7 +5107,7 @@ async def test_get_public_key_flattened_async(): @pytest.mark.asyncio -async def test_get_public_key_flattened_error_async(): +async def test_get_crypto_key_version_flattened_error_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) @@ -4896,8 +5115,8 @@ async def test_get_public_key_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_public_key( - service.GetPublicKeyRequest(), + await client.get_crypto_key_version( + service.GetCryptoKeyVersionRequest(), name="name_value", ) @@ -4905,11 +5124,11 @@ async def test_get_public_key_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - service.GetImportJobRequest, + service.GetPublicKeyRequest, dict, ], ) -def test_get_import_job(request_type, transport: str = "grpc"): +def test_get_public_key(request_type, transport: str = "grpc"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4920,36 +5139,36 @@ def test_get_import_job(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_import_job), "__call__") as call: + with mock.patch.object(type(client.transport.get_public_key), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.ImportJob( + call.return_value = resources.PublicKey( + pem="pem_value", + algorithm=resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION, name="name_value", - import_method=resources.ImportJob.ImportMethod.RSA_OAEP_3072_SHA1_AES_256, protection_level=resources.ProtectionLevel.SOFTWARE, - state=resources.ImportJob.ImportJobState.PENDING_GENERATION, - crypto_key_backend="crypto_key_backend_value", + public_key_format=resources.PublicKey.PublicKeyFormat.PEM, ) - response = client.get_import_job(request) + response = client.get_public_key(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.GetImportJobRequest() + request = service.GetPublicKeyRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.ImportJob) - assert response.name == "name_value" + assert isinstance(response, resources.PublicKey) + assert response.pem == "pem_value" assert ( - response.import_method - == resources.ImportJob.ImportMethod.RSA_OAEP_3072_SHA1_AES_256 + response.algorithm + == resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION ) + assert response.name == "name_value" assert response.protection_level == resources.ProtectionLevel.SOFTWARE - assert response.state == resources.ImportJob.ImportJobState.PENDING_GENERATION - assert response.crypto_key_backend == "crypto_key_backend_value" + assert response.public_key_format == resources.PublicKey.PublicKeyFormat.PEM -def test_get_import_job_non_empty_request_with_auto_populated_field(): +def test_get_public_key_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = KeyManagementServiceClient( @@ -4960,24 +5179,24 @@ def test_get_import_job_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.GetImportJobRequest( + request = service.GetPublicKeyRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_import_job), "__call__") as call: + with mock.patch.object(type(client.transport.get_public_key), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_import_job(request=request) + client.get_public_key(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.GetImportJobRequest( + assert args[0] == service.GetPublicKeyRequest( name="name_value", ) -def test_get_import_job_use_cached_wrapped_rpc(): +def test_get_public_key_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4991,21 +5210,21 @@ def test_get_import_job_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_import_job in client._transport._wrapped_methods + assert client._transport.get_public_key in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.get_import_job] = mock_rpc + client._transport._wrapped_methods[client._transport.get_public_key] = mock_rpc request = {} - client.get_import_job(request) + client.get_public_key(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_import_job(request) + client.get_public_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5013,7 +5232,7 @@ def test_get_import_job_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_import_job_async_use_cached_wrapped_rpc( +async def test_get_public_key_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5030,7 +5249,7 @@ async def test_get_import_job_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_import_job + client._client._transport.get_public_key in client._client._transport._wrapped_methods ) @@ -5038,16 +5257,16 @@ async def test_get_import_job_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_import_job + client._client._transport.get_public_key ] = mock_rpc request = {} - await client.get_import_job(request) + await client.get_public_key(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.get_import_job(request) + await client.get_public_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5055,8 +5274,8 @@ async def test_get_import_job_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_import_job_async( - transport: str = "grpc_asyncio", request_type=service.GetImportJobRequest +async def test_get_public_key_async( + transport: str = "grpc_asyncio", request_type=service.GetPublicKeyRequest ): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -5068,57 +5287,57 @@ async def test_get_import_job_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_import_job), "__call__") as call: + with mock.patch.object(type(client.transport.get_public_key), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.ImportJob( + resources.PublicKey( + pem="pem_value", + algorithm=resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION, name="name_value", - import_method=resources.ImportJob.ImportMethod.RSA_OAEP_3072_SHA1_AES_256, protection_level=resources.ProtectionLevel.SOFTWARE, - state=resources.ImportJob.ImportJobState.PENDING_GENERATION, - crypto_key_backend="crypto_key_backend_value", + public_key_format=resources.PublicKey.PublicKeyFormat.PEM, ) ) - response = await client.get_import_job(request) + response = await client.get_public_key(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.GetImportJobRequest() + request = service.GetPublicKeyRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.ImportJob) - assert response.name == "name_value" + assert isinstance(response, resources.PublicKey) + assert response.pem == "pem_value" assert ( - response.import_method - == resources.ImportJob.ImportMethod.RSA_OAEP_3072_SHA1_AES_256 + response.algorithm + == resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION ) + assert response.name == "name_value" assert response.protection_level == resources.ProtectionLevel.SOFTWARE - assert response.state == resources.ImportJob.ImportJobState.PENDING_GENERATION - assert response.crypto_key_backend == "crypto_key_backend_value" + assert response.public_key_format == resources.PublicKey.PublicKeyFormat.PEM @pytest.mark.asyncio -async def test_get_import_job_async_from_dict(): - await test_get_import_job_async(request_type=dict) +async def test_get_public_key_async_from_dict(): + await test_get_public_key_async(request_type=dict) -def test_get_import_job_field_headers(): +def test_get_public_key_field_headers(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetImportJobRequest() + request = service.GetPublicKeyRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_import_job), "__call__") as call: - call.return_value = resources.ImportJob() - client.get_import_job(request) + with mock.patch.object(type(client.transport.get_public_key), "__call__") as call: + call.return_value = resources.PublicKey() + client.get_public_key(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -5134,21 +5353,21 @@ def test_get_import_job_field_headers(): @pytest.mark.asyncio -async def test_get_import_job_field_headers_async(): +async def test_get_public_key_field_headers_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetImportJobRequest() + request = service.GetPublicKeyRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_import_job), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.ImportJob()) - await client.get_import_job(request) + with mock.patch.object(type(client.transport.get_public_key), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.PublicKey()) + await client.get_public_key(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5163,18 +5382,18 @@ async def test_get_import_job_field_headers_async(): ) in kw["metadata"] -def test_get_import_job_flattened(): +def test_get_public_key_flattened(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_import_job), "__call__") as call: + with mock.patch.object(type(client.transport.get_public_key), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.ImportJob() + call.return_value = resources.PublicKey() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.get_import_job( + client.get_public_key( name="name_value", ) @@ -5187,7 +5406,7 @@ def test_get_import_job_flattened(): assert arg == mock_val -def test_get_import_job_flattened_error(): +def test_get_public_key_flattened_error(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5195,27 +5414,27 @@ def test_get_import_job_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_import_job( - service.GetImportJobRequest(), + client.get_public_key( + service.GetPublicKeyRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_import_job_flattened_async(): +async def test_get_public_key_flattened_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_import_job), "__call__") as call: + with mock.patch.object(type(client.transport.get_public_key), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.ImportJob() + call.return_value = resources.PublicKey() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.ImportJob()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.PublicKey()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_import_job( + response = await client.get_public_key( name="name_value", ) @@ -5229,7 +5448,7 @@ async def test_get_import_job_flattened_async(): @pytest.mark.asyncio -async def test_get_import_job_flattened_error_async(): +async def test_get_public_key_flattened_error_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) @@ -5237,8 +5456,8 @@ async def test_get_import_job_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_import_job( - service.GetImportJobRequest(), + await client.get_public_key( + service.GetPublicKeyRequest(), name="name_value", ) @@ -5246,11 +5465,11 @@ async def test_get_import_job_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - service.CreateKeyRingRequest, + service.GetImportJobRequest, dict, ], ) -def test_create_key_ring(request_type, transport: str = "grpc"): +def test_get_import_job(request_type, transport: str = "grpc"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5261,25 +5480,36 @@ def test_create_key_ring(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_key_ring), "__call__") as call: + with mock.patch.object(type(client.transport.get_import_job), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.KeyRing( + call.return_value = resources.ImportJob( name="name_value", + import_method=resources.ImportJob.ImportMethod.RSA_OAEP_3072_SHA1_AES_256, + protection_level=resources.ProtectionLevel.SOFTWARE, + state=resources.ImportJob.ImportJobState.PENDING_GENERATION, + crypto_key_backend="crypto_key_backend_value", ) - response = client.create_key_ring(request) + response = client.get_import_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.CreateKeyRingRequest() + request = service.GetImportJobRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.KeyRing) + assert isinstance(response, resources.ImportJob) assert response.name == "name_value" + assert ( + response.import_method + == resources.ImportJob.ImportMethod.RSA_OAEP_3072_SHA1_AES_256 + ) + assert response.protection_level == resources.ProtectionLevel.SOFTWARE + assert response.state == resources.ImportJob.ImportJobState.PENDING_GENERATION + assert response.crypto_key_backend == "crypto_key_backend_value" -def test_create_key_ring_non_empty_request_with_auto_populated_field(): +def test_get_import_job_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = KeyManagementServiceClient( @@ -5290,26 +5520,24 @@ def test_create_key_ring_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = service.CreateKeyRingRequest( - parent="parent_value", - key_ring_id="key_ring_id_value", + request = service.GetImportJobRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_key_ring), "__call__") as call: + with mock.patch.object(type(client.transport.get_import_job), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_key_ring(request=request) + client.get_import_job(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateKeyRingRequest( - parent="parent_value", - key_ring_id="key_ring_id_value", + assert args[0] == service.GetImportJobRequest( + name="name_value", ) -def test_create_key_ring_use_cached_wrapped_rpc(): +def test_get_import_job_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5323,21 +5551,21 @@ def test_create_key_ring_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_key_ring in client._transport._wrapped_methods + assert client._transport.get_import_job in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_key_ring] = mock_rpc + client._transport._wrapped_methods[client._transport.get_import_job] = mock_rpc request = {} - client.create_key_ring(request) + client.get_import_job(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_key_ring(request) + client.get_import_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5345,7 +5573,7 @@ def test_create_key_ring_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_key_ring_async_use_cached_wrapped_rpc( +async def test_get_import_job_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5362,7 +5590,7 @@ async def test_create_key_ring_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_key_ring + client._client._transport.get_import_job in client._client._transport._wrapped_methods ) @@ -5370,16 +5598,16 @@ async def test_create_key_ring_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_key_ring + client._client._transport.get_import_job ] = mock_rpc request = {} - await client.create_key_ring(request) + await client.get_import_job(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.create_key_ring(request) + await client.get_import_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5387,8 +5615,8 @@ async def test_create_key_ring_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_key_ring_async( - transport: str = "grpc_asyncio", request_type=service.CreateKeyRingRequest +async def test_get_import_job_async( + transport: str = "grpc_asyncio", request_type=service.GetImportJobRequest ): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -5400,46 +5628,57 @@ async def test_create_key_ring_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_key_ring), "__call__") as call: + with mock.patch.object(type(client.transport.get_import_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.KeyRing( + resources.ImportJob( name="name_value", + import_method=resources.ImportJob.ImportMethod.RSA_OAEP_3072_SHA1_AES_256, + protection_level=resources.ProtectionLevel.SOFTWARE, + state=resources.ImportJob.ImportJobState.PENDING_GENERATION, + crypto_key_backend="crypto_key_backend_value", ) ) - response = await client.create_key_ring(request) + response = await client.get_import_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.CreateKeyRingRequest() + request = service.GetImportJobRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.KeyRing) + assert isinstance(response, resources.ImportJob) assert response.name == "name_value" + assert ( + response.import_method + == resources.ImportJob.ImportMethod.RSA_OAEP_3072_SHA1_AES_256 + ) + assert response.protection_level == resources.ProtectionLevel.SOFTWARE + assert response.state == resources.ImportJob.ImportJobState.PENDING_GENERATION + assert response.crypto_key_backend == "crypto_key_backend_value" @pytest.mark.asyncio -async def test_create_key_ring_async_from_dict(): - await test_create_key_ring_async(request_type=dict) +async def test_get_import_job_async_from_dict(): + await test_get_import_job_async(request_type=dict) -def test_create_key_ring_field_headers(): +def test_get_import_job_field_headers(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.CreateKeyRingRequest() + request = service.GetImportJobRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_key_ring), "__call__") as call: - call.return_value = resources.KeyRing() - client.create_key_ring(request) + with mock.patch.object(type(client.transport.get_import_job), "__call__") as call: + call.return_value = resources.ImportJob() + client.get_import_job(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -5450,26 +5689,26 @@ def test_create_key_ring_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_key_ring_field_headers_async(): +async def test_get_import_job_field_headers_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.CreateKeyRingRequest() + request = service.GetImportJobRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_key_ring), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.KeyRing()) - await client.create_key_ring(request) + with mock.patch.object(type(client.transport.get_import_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.ImportJob()) + await client.get_import_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5480,43 +5719,35 @@ async def test_create_key_ring_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_key_ring_flattened(): +def test_get_import_job_flattened(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_key_ring), "__call__") as call: + with mock.patch.object(type(client.transport.get_import_job), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.KeyRing() + call.return_value = resources.ImportJob() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_key_ring( - parent="parent_value", - key_ring_id="key_ring_id_value", - key_ring=resources.KeyRing(name="name_value"), + client.get_import_job( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].key_ring_id - mock_val = "key_ring_id_value" - assert arg == mock_val - arg = args[0].key_ring - mock_val = resources.KeyRing(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_key_ring_flattened_error(): +def test_get_import_job_flattened_error(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5524,51 +5755,41 @@ def test_create_key_ring_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_key_ring( - service.CreateKeyRingRequest(), - parent="parent_value", - key_ring_id="key_ring_id_value", - key_ring=resources.KeyRing(name="name_value"), + client.get_import_job( + service.GetImportJobRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_key_ring_flattened_async(): +async def test_get_import_job_flattened_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_key_ring), "__call__") as call: + with mock.patch.object(type(client.transport.get_import_job), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.KeyRing() + call.return_value = resources.ImportJob() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.KeyRing()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.ImportJob()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_key_ring( - parent="parent_value", - key_ring_id="key_ring_id_value", - key_ring=resources.KeyRing(name="name_value"), + response = await client.get_import_job( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].key_ring_id - mock_val = "key_ring_id_value" - assert arg == mock_val - arg = args[0].key_ring - mock_val = resources.KeyRing(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_key_ring_flattened_error_async(): +async def test_get_import_job_flattened_error_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) @@ -5576,22 +5797,20 @@ async def test_create_key_ring_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_key_ring( - service.CreateKeyRingRequest(), - parent="parent_value", - key_ring_id="key_ring_id_value", - key_ring=resources.KeyRing(name="name_value"), + await client.get_import_job( + service.GetImportJobRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - service.CreateCryptoKeyRequest, + service.GetRetiredResourceRequest, dict, ], ) -def test_create_crypto_key(request_type, transport: str = "grpc"): +def test_get_retired_resource(request_type, transport: str = "grpc"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5603,32 +5822,30 @@ def test_create_crypto_key(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_crypto_key), "__call__" + type(client.transport.get_retired_resource), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.CryptoKey( + call.return_value = resources.RetiredResource( name="name_value", - purpose=resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT, - import_only=True, - crypto_key_backend="crypto_key_backend_value", + original_resource="original_resource_value", + resource_type="resource_type_value", ) - response = client.create_crypto_key(request) + response = client.get_retired_resource(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.CreateCryptoKeyRequest() + request = service.GetRetiredResourceRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.CryptoKey) + assert isinstance(response, resources.RetiredResource) assert response.name == "name_value" - assert response.purpose == resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT - assert response.import_only is True - assert response.crypto_key_backend == "crypto_key_backend_value" + assert response.original_resource == "original_resource_value" + assert response.resource_type == "resource_type_value" -def test_create_crypto_key_non_empty_request_with_auto_populated_field(): +def test_get_retired_resource_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = KeyManagementServiceClient( @@ -5639,28 +5856,26 @@ def test_create_crypto_key_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.CreateCryptoKeyRequest( - parent="parent_value", - crypto_key_id="crypto_key_id_value", + request = service.GetRetiredResourceRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_crypto_key), "__call__" + type(client.transport.get_retired_resource), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_crypto_key(request=request) + client.get_retired_resource(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateCryptoKeyRequest( - parent="parent_value", - crypto_key_id="crypto_key_id_value", + assert args[0] == service.GetRetiredResourceRequest( + name="name_value", ) -def test_create_crypto_key_use_cached_wrapped_rpc(): +def test_get_retired_resource_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5674,7 +5889,9 @@ def test_create_crypto_key_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_crypto_key in client._transport._wrapped_methods + assert ( + client._transport.get_retired_resource in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -5682,15 +5899,15 @@ def test_create_crypto_key_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_crypto_key + client._transport.get_retired_resource ] = mock_rpc request = {} - client.create_crypto_key(request) + client.get_retired_resource(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_crypto_key(request) + client.get_retired_resource(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5698,7 +5915,7 @@ def test_create_crypto_key_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_crypto_key_async_use_cached_wrapped_rpc( +async def test_get_retired_resource_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5715,7 +5932,7 @@ async def test_create_crypto_key_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_crypto_key + client._client._transport.get_retired_resource in client._client._transport._wrapped_methods ) @@ -5723,16 +5940,16 @@ async def test_create_crypto_key_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_crypto_key + client._client._transport.get_retired_resource ] = mock_rpc request = {} - await client.create_crypto_key(request) + await client.get_retired_resource(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.create_crypto_key(request) + await client.get_retired_resource(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5740,8 +5957,8 @@ async def test_create_crypto_key_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_crypto_key_async( - transport: str = "grpc_asyncio", request_type=service.CreateCryptoKeyRequest +async def test_get_retired_resource_async( + transport: str = "grpc_asyncio", request_type=service.GetRetiredResourceRequest ): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -5754,55 +5971,53 @@ async def test_create_crypto_key_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_crypto_key), "__call__" + type(client.transport.get_retired_resource), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CryptoKey( + resources.RetiredResource( name="name_value", - purpose=resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT, - import_only=True, - crypto_key_backend="crypto_key_backend_value", + original_resource="original_resource_value", + resource_type="resource_type_value", ) ) - response = await client.create_crypto_key(request) + response = await client.get_retired_resource(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.CreateCryptoKeyRequest() + request = service.GetRetiredResourceRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.CryptoKey) + assert isinstance(response, resources.RetiredResource) assert response.name == "name_value" - assert response.purpose == resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT - assert response.import_only is True - assert response.crypto_key_backend == "crypto_key_backend_value" + assert response.original_resource == "original_resource_value" + assert response.resource_type == "resource_type_value" @pytest.mark.asyncio -async def test_create_crypto_key_async_from_dict(): - await test_create_crypto_key_async(request_type=dict) +async def test_get_retired_resource_async_from_dict(): + await test_get_retired_resource_async(request_type=dict) -def test_create_crypto_key_field_headers(): +def test_get_retired_resource_field_headers(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.CreateCryptoKeyRequest() + request = service.GetRetiredResourceRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_crypto_key), "__call__" + type(client.transport.get_retired_resource), "__call__" ) as call: - call.return_value = resources.CryptoKey() - client.create_crypto_key(request) + call.return_value = resources.RetiredResource() + client.get_retired_resource(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -5813,28 +6028,30 @@ def test_create_crypto_key_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_crypto_key_field_headers_async(): +async def test_get_retired_resource_field_headers_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.CreateCryptoKeyRequest() + request = service.GetRetiredResourceRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_crypto_key), "__call__" + type(client.transport.get_retired_resource), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.CryptoKey()) - await client.create_crypto_key(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.RetiredResource() + ) + await client.get_retired_resource(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5845,45 +6062,37 @@ async def test_create_crypto_key_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_crypto_key_flattened(): +def test_get_retired_resource_flattened(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_crypto_key), "__call__" + type(client.transport.get_retired_resource), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.CryptoKey() + call.return_value = resources.RetiredResource() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_crypto_key( - parent="parent_value", - crypto_key_id="crypto_key_id_value", - crypto_key=resources.CryptoKey(name="name_value"), + client.get_retired_resource( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].crypto_key_id - mock_val = "crypto_key_id_value" - assert arg == mock_val - arg = args[0].crypto_key - mock_val = resources.CryptoKey(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_crypto_key_flattened_error(): +def test_get_retired_resource_flattened_error(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5891,53 +6100,45 @@ def test_create_crypto_key_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_crypto_key( - service.CreateCryptoKeyRequest(), - parent="parent_value", - crypto_key_id="crypto_key_id_value", - crypto_key=resources.CryptoKey(name="name_value"), + client.get_retired_resource( + service.GetRetiredResourceRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_crypto_key_flattened_async(): +async def test_get_retired_resource_flattened_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.create_crypto_key), "__call__" + type(client.transport.get_retired_resource), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.CryptoKey() + call.return_value = resources.RetiredResource() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.CryptoKey()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.RetiredResource() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_crypto_key( - parent="parent_value", - crypto_key_id="crypto_key_id_value", - crypto_key=resources.CryptoKey(name="name_value"), + response = await client.get_retired_resource( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].crypto_key_id - mock_val = "crypto_key_id_value" - assert arg == mock_val - arg = args[0].crypto_key - mock_val = resources.CryptoKey(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_crypto_key_flattened_error_async(): +async def test_get_retired_resource_flattened_error_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) @@ -5945,22 +6146,20 @@ async def test_create_crypto_key_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_crypto_key( - service.CreateCryptoKeyRequest(), - parent="parent_value", - crypto_key_id="crypto_key_id_value", - crypto_key=resources.CryptoKey(name="name_value"), + await client.get_retired_resource( + service.GetRetiredResourceRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - service.CreateCryptoKeyVersionRequest, + service.CreateKeyRingRequest, dict, ], ) -def test_create_crypto_key_version(request_type, transport: str = "grpc"): +def test_create_key_ring(request_type, transport: str = "grpc"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5971,52 +6170,25 @@ def test_create_crypto_key_version(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_crypto_key_version), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_key_ring), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.CryptoKeyVersion( + call.return_value = resources.KeyRing( name="name_value", - state=resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION, - protection_level=resources.ProtectionLevel.SOFTWARE, - algorithm=resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION, - import_job="import_job_value", - import_failure_reason="import_failure_reason_value", - generation_failure_reason="generation_failure_reason_value", - external_destruction_failure_reason="external_destruction_failure_reason_value", - reimport_eligible=True, ) - response = client.create_crypto_key_version(request) + response = client.create_key_ring(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.CreateCryptoKeyVersionRequest() + request = service.CreateKeyRingRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.CryptoKeyVersion) + assert isinstance(response, resources.KeyRing) assert response.name == "name_value" - assert ( - response.state - == resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION - ) - assert response.protection_level == resources.ProtectionLevel.SOFTWARE - assert ( - response.algorithm - == resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION - ) - assert response.import_job == "import_job_value" - assert response.import_failure_reason == "import_failure_reason_value" - assert response.generation_failure_reason == "generation_failure_reason_value" - assert ( - response.external_destruction_failure_reason - == "external_destruction_failure_reason_value" - ) - assert response.reimport_eligible is True -def test_create_crypto_key_version_non_empty_request_with_auto_populated_field(): +def test_create_key_ring_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = KeyManagementServiceClient( @@ -6027,26 +6199,26 @@ def test_create_crypto_key_version_non_empty_request_with_auto_populated_field() # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.CreateCryptoKeyVersionRequest( + request = service.CreateKeyRingRequest( parent="parent_value", + key_ring_id="key_ring_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_crypto_key_version), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_key_ring), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_crypto_key_version(request=request) + client.create_key_ring(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateCryptoKeyVersionRequest( + assert args[0] == service.CreateKeyRingRequest( parent="parent_value", + key_ring_id="key_ring_id_value", ) -def test_create_crypto_key_version_use_cached_wrapped_rpc(): +def test_create_key_ring_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6060,26 +6232,21 @@ def test_create_crypto_key_version_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.create_crypto_key_version - in client._transport._wrapped_methods - ) + assert client._transport.create_key_ring in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_crypto_key_version - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_key_ring] = mock_rpc request = {} - client.create_crypto_key_version(request) + client.create_key_ring(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_crypto_key_version(request) + client.create_key_ring(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6087,7 +6254,7 @@ def test_create_crypto_key_version_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_crypto_key_version_async_use_cached_wrapped_rpc( +async def test_create_key_ring_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6104,7 +6271,7 @@ async def test_create_crypto_key_version_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_crypto_key_version + client._client._transport.create_key_ring in client._client._transport._wrapped_methods ) @@ -6112,16 +6279,16 @@ async def test_create_crypto_key_version_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_crypto_key_version + client._client._transport.create_key_ring ] = mock_rpc request = {} - await client.create_crypto_key_version(request) + await client.create_key_ring(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.create_crypto_key_version(request) + await client.create_key_ring(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6129,8 +6296,8 @@ async def test_create_crypto_key_version_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_crypto_key_version_async( - transport: str = "grpc_asyncio", request_type=service.CreateCryptoKeyVersionRequest +async def test_create_key_ring_async( + transport: str = "grpc_asyncio", request_type=service.CreateKeyRingRequest ): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -6142,75 +6309,46 @@ async def test_create_crypto_key_version_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_crypto_key_version), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_key_ring), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CryptoKeyVersion( + resources.KeyRing( name="name_value", - state=resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION, - protection_level=resources.ProtectionLevel.SOFTWARE, - algorithm=resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION, - import_job="import_job_value", - import_failure_reason="import_failure_reason_value", - generation_failure_reason="generation_failure_reason_value", - external_destruction_failure_reason="external_destruction_failure_reason_value", - reimport_eligible=True, ) ) - response = await client.create_crypto_key_version(request) + response = await client.create_key_ring(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.CreateCryptoKeyVersionRequest() + request = service.CreateKeyRingRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.CryptoKeyVersion) + assert isinstance(response, resources.KeyRing) assert response.name == "name_value" - assert ( - response.state - == resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION - ) - assert response.protection_level == resources.ProtectionLevel.SOFTWARE - assert ( - response.algorithm - == resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION - ) - assert response.import_job == "import_job_value" - assert response.import_failure_reason == "import_failure_reason_value" - assert response.generation_failure_reason == "generation_failure_reason_value" - assert ( - response.external_destruction_failure_reason - == "external_destruction_failure_reason_value" - ) - assert response.reimport_eligible is True @pytest.mark.asyncio -async def test_create_crypto_key_version_async_from_dict(): - await test_create_crypto_key_version_async(request_type=dict) +async def test_create_key_ring_async_from_dict(): + await test_create_key_ring_async(request_type=dict) -def test_create_crypto_key_version_field_headers(): +def test_create_key_ring_field_headers(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.CreateCryptoKeyVersionRequest() + request = service.CreateKeyRingRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_crypto_key_version), "__call__" - ) as call: - call.return_value = resources.CryptoKeyVersion() - client.create_crypto_key_version(request) + with mock.patch.object(type(client.transport.create_key_ring), "__call__") as call: + call.return_value = resources.KeyRing() + client.create_key_ring(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -6226,25 +6364,21 @@ def test_create_crypto_key_version_field_headers(): @pytest.mark.asyncio -async def test_create_crypto_key_version_field_headers_async(): +async def test_create_key_ring_field_headers_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.CreateCryptoKeyVersionRequest() + request = service.CreateKeyRingRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_crypto_key_version), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CryptoKeyVersion() - ) - await client.create_crypto_key_version(request) + with mock.patch.object(type(client.transport.create_key_ring), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.KeyRing()) + await client.create_key_ring(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -6259,22 +6393,21 @@ async def test_create_crypto_key_version_field_headers_async(): ) in kw["metadata"] -def test_create_crypto_key_version_flattened(): +def test_create_key_ring_flattened(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_crypto_key_version), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_key_ring), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.CryptoKeyVersion() + call.return_value = resources.KeyRing() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_crypto_key_version( + client.create_key_ring( parent="parent_value", - crypto_key_version=resources.CryptoKeyVersion(name="name_value"), + key_ring_id="key_ring_id_value", + key_ring=resources.KeyRing(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -6284,12 +6417,15 @@ def test_create_crypto_key_version_flattened(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].crypto_key_version - mock_val = resources.CryptoKeyVersion(name="name_value") + arg = args[0].key_ring_id + mock_val = "key_ring_id_value" + assert arg == mock_val + arg = args[0].key_ring + mock_val = resources.KeyRing(name="name_value") assert arg == mock_val -def test_create_crypto_key_version_flattened_error(): +def test_create_key_ring_flattened_error(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6297,34 +6433,32 @@ def test_create_crypto_key_version_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_crypto_key_version( - service.CreateCryptoKeyVersionRequest(), + client.create_key_ring( + service.CreateKeyRingRequest(), parent="parent_value", - crypto_key_version=resources.CryptoKeyVersion(name="name_value"), + key_ring_id="key_ring_id_value", + key_ring=resources.KeyRing(name="name_value"), ) @pytest.mark.asyncio -async def test_create_crypto_key_version_flattened_async(): +async def test_create_key_ring_flattened_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_crypto_key_version), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_key_ring), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.CryptoKeyVersion() + call.return_value = resources.KeyRing() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CryptoKeyVersion() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.KeyRing()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.create_crypto_key_version( + response = await client.create_key_ring( parent="parent_value", - crypto_key_version=resources.CryptoKeyVersion(name="name_value"), + key_ring_id="key_ring_id_value", + key_ring=resources.KeyRing(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -6334,13 +6468,16 @@ async def test_create_crypto_key_version_flattened_async(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].crypto_key_version - mock_val = resources.CryptoKeyVersion(name="name_value") + arg = args[0].key_ring_id + mock_val = "key_ring_id_value" + assert arg == mock_val + arg = args[0].key_ring + mock_val = resources.KeyRing(name="name_value") assert arg == mock_val @pytest.mark.asyncio -async def test_create_crypto_key_version_flattened_error_async(): +async def test_create_key_ring_flattened_error_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) @@ -6348,21 +6485,22 @@ async def test_create_crypto_key_version_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_crypto_key_version( - service.CreateCryptoKeyVersionRequest(), + await client.create_key_ring( + service.CreateKeyRingRequest(), parent="parent_value", - crypto_key_version=resources.CryptoKeyVersion(name="name_value"), + key_ring_id="key_ring_id_value", + key_ring=resources.KeyRing(name="name_value"), ) @pytest.mark.parametrize( "request_type", [ - service.ImportCryptoKeyVersionRequest, + service.CreateCryptoKeyRequest, dict, ], ) -def test_import_crypto_key_version(request_type, transport: str = "grpc"): +def test_create_crypto_key(request_type, transport: str = "grpc"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6374,51 +6512,32 @@ def test_import_crypto_key_version(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.import_crypto_key_version), "__call__" + type(client.transport.create_crypto_key), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.CryptoKeyVersion( + call.return_value = resources.CryptoKey( name="name_value", - state=resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION, - protection_level=resources.ProtectionLevel.SOFTWARE, - algorithm=resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION, - import_job="import_job_value", - import_failure_reason="import_failure_reason_value", - generation_failure_reason="generation_failure_reason_value", - external_destruction_failure_reason="external_destruction_failure_reason_value", - reimport_eligible=True, + purpose=resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT, + import_only=True, + crypto_key_backend="crypto_key_backend_value", ) - response = client.import_crypto_key_version(request) + response = client.create_crypto_key(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.ImportCryptoKeyVersionRequest() + request = service.CreateCryptoKeyRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.CryptoKeyVersion) + assert isinstance(response, resources.CryptoKey) assert response.name == "name_value" - assert ( - response.state - == resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION - ) - assert response.protection_level == resources.ProtectionLevel.SOFTWARE - assert ( - response.algorithm - == resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION - ) - assert response.import_job == "import_job_value" - assert response.import_failure_reason == "import_failure_reason_value" - assert response.generation_failure_reason == "generation_failure_reason_value" - assert ( - response.external_destruction_failure_reason - == "external_destruction_failure_reason_value" - ) - assert response.reimport_eligible is True + assert response.purpose == resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT + assert response.import_only is True + assert response.crypto_key_backend == "crypto_key_backend_value" -def test_import_crypto_key_version_non_empty_request_with_auto_populated_field(): +def test_create_crypto_key_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = KeyManagementServiceClient( @@ -6429,30 +6548,28 @@ def test_import_crypto_key_version_non_empty_request_with_auto_populated_field() # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.ImportCryptoKeyVersionRequest( + request = service.CreateCryptoKeyRequest( parent="parent_value", - crypto_key_version="crypto_key_version_value", - import_job="import_job_value", + crypto_key_id="crypto_key_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.import_crypto_key_version), "__call__" + type(client.transport.create_crypto_key), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.import_crypto_key_version(request=request) + client.create_crypto_key(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.ImportCryptoKeyVersionRequest( + assert args[0] == service.CreateCryptoKeyRequest( parent="parent_value", - crypto_key_version="crypto_key_version_value", - import_job="import_job_value", + crypto_key_id="crypto_key_id_value", ) -def test_import_crypto_key_version_use_cached_wrapped_rpc(): +def test_create_crypto_key_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6466,10 +6583,7 @@ def test_import_crypto_key_version_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.import_crypto_key_version - in client._transport._wrapped_methods - ) + assert client._transport.create_crypto_key in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -6477,15 +6591,15 @@ def test_import_crypto_key_version_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.import_crypto_key_version + client._transport.create_crypto_key ] = mock_rpc request = {} - client.import_crypto_key_version(request) + client.create_crypto_key(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.import_crypto_key_version(request) + client.create_crypto_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6493,7 +6607,7 @@ def test_import_crypto_key_version_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_import_crypto_key_version_async_use_cached_wrapped_rpc( +async def test_create_crypto_key_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6510,7 +6624,7 @@ async def test_import_crypto_key_version_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.import_crypto_key_version + client._client._transport.create_crypto_key in client._client._transport._wrapped_methods ) @@ -6518,16 +6632,16 @@ async def test_import_crypto_key_version_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.import_crypto_key_version + client._client._transport.create_crypto_key ] = mock_rpc request = {} - await client.import_crypto_key_version(request) + await client.create_crypto_key(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.import_crypto_key_version(request) + await client.create_crypto_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6535,8 +6649,8 @@ async def test_import_crypto_key_version_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_import_crypto_key_version_async( - transport: str = "grpc_asyncio", request_type=service.ImportCryptoKeyVersionRequest +async def test_create_crypto_key_async( + transport: str = "grpc_asyncio", request_type=service.CreateCryptoKeyRequest ): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -6549,74 +6663,55 @@ async def test_import_crypto_key_version_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.import_crypto_key_version), "__call__" + type(client.transport.create_crypto_key), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CryptoKeyVersion( + resources.CryptoKey( name="name_value", - state=resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION, - protection_level=resources.ProtectionLevel.SOFTWARE, - algorithm=resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION, - import_job="import_job_value", - import_failure_reason="import_failure_reason_value", - generation_failure_reason="generation_failure_reason_value", - external_destruction_failure_reason="external_destruction_failure_reason_value", - reimport_eligible=True, + purpose=resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT, + import_only=True, + crypto_key_backend="crypto_key_backend_value", ) ) - response = await client.import_crypto_key_version(request) + response = await client.create_crypto_key(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.ImportCryptoKeyVersionRequest() + request = service.CreateCryptoKeyRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.CryptoKeyVersion) + assert isinstance(response, resources.CryptoKey) assert response.name == "name_value" - assert ( - response.state - == resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION - ) - assert response.protection_level == resources.ProtectionLevel.SOFTWARE - assert ( - response.algorithm - == resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION - ) - assert response.import_job == "import_job_value" - assert response.import_failure_reason == "import_failure_reason_value" - assert response.generation_failure_reason == "generation_failure_reason_value" - assert ( - response.external_destruction_failure_reason - == "external_destruction_failure_reason_value" - ) - assert response.reimport_eligible is True + assert response.purpose == resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT + assert response.import_only is True + assert response.crypto_key_backend == "crypto_key_backend_value" @pytest.mark.asyncio -async def test_import_crypto_key_version_async_from_dict(): - await test_import_crypto_key_version_async(request_type=dict) +async def test_create_crypto_key_async_from_dict(): + await test_create_crypto_key_async(request_type=dict) -def test_import_crypto_key_version_field_headers(): +def test_create_crypto_key_field_headers(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.ImportCryptoKeyVersionRequest() + request = service.CreateCryptoKeyRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.import_crypto_key_version), "__call__" + type(client.transport.create_crypto_key), "__call__" ) as call: - call.return_value = resources.CryptoKeyVersion() - client.import_crypto_key_version(request) + call.return_value = resources.CryptoKey() + client.create_crypto_key(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -6632,25 +6727,23 @@ def test_import_crypto_key_version_field_headers(): @pytest.mark.asyncio -async def test_import_crypto_key_version_field_headers_async(): +async def test_create_crypto_key_field_headers_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.ImportCryptoKeyVersionRequest() + request = service.CreateCryptoKeyRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.import_crypto_key_version), "__call__" + type(client.transport.create_crypto_key), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CryptoKeyVersion() - ) - await client.import_crypto_key_version(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.CryptoKey()) + await client.create_crypto_key(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -6665,14 +6758,118 @@ async def test_import_crypto_key_version_field_headers_async(): ) in kw["metadata"] +def test_create_crypto_key_flattened(): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_crypto_key), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.CryptoKey() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_crypto_key( + parent="parent_value", + crypto_key_id="crypto_key_id_value", + crypto_key=resources.CryptoKey(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].crypto_key_id + mock_val = "crypto_key_id_value" + assert arg == mock_val + arg = args[0].crypto_key + mock_val = resources.CryptoKey(name="name_value") + assert arg == mock_val + + +def test_create_crypto_key_flattened_error(): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_crypto_key( + service.CreateCryptoKeyRequest(), + parent="parent_value", + crypto_key_id="crypto_key_id_value", + crypto_key=resources.CryptoKey(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_crypto_key_flattened_async(): + client = KeyManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_crypto_key), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.CryptoKey() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.CryptoKey()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_crypto_key( + parent="parent_value", + crypto_key_id="crypto_key_id_value", + crypto_key=resources.CryptoKey(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].crypto_key_id + mock_val = "crypto_key_id_value" + assert arg == mock_val + arg = args[0].crypto_key + mock_val = resources.CryptoKey(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_crypto_key_flattened_error_async(): + client = KeyManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_crypto_key( + service.CreateCryptoKeyRequest(), + parent="parent_value", + crypto_key_id="crypto_key_id_value", + crypto_key=resources.CryptoKey(name="name_value"), + ) + + @pytest.mark.parametrize( "request_type", [ - service.CreateImportJobRequest, + service.CreateCryptoKeyVersionRequest, dict, ], ) -def test_create_import_job(request_type, transport: str = "grpc"): +def test_create_crypto_key_version(request_type, transport: str = "grpc"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6684,37 +6881,51 @@ def test_create_import_job(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_import_job), "__call__" + type(client.transport.create_crypto_key_version), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.ImportJob( + call.return_value = resources.CryptoKeyVersion( name="name_value", - import_method=resources.ImportJob.ImportMethod.RSA_OAEP_3072_SHA1_AES_256, + state=resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION, protection_level=resources.ProtectionLevel.SOFTWARE, - state=resources.ImportJob.ImportJobState.PENDING_GENERATION, - crypto_key_backend="crypto_key_backend_value", + algorithm=resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION, + import_job="import_job_value", + import_failure_reason="import_failure_reason_value", + generation_failure_reason="generation_failure_reason_value", + external_destruction_failure_reason="external_destruction_failure_reason_value", + reimport_eligible=True, ) - response = client.create_import_job(request) + response = client.create_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.CreateImportJobRequest() + request = service.CreateCryptoKeyVersionRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.ImportJob) + assert isinstance(response, resources.CryptoKeyVersion) assert response.name == "name_value" assert ( - response.import_method - == resources.ImportJob.ImportMethod.RSA_OAEP_3072_SHA1_AES_256 + response.state + == resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION ) assert response.protection_level == resources.ProtectionLevel.SOFTWARE - assert response.state == resources.ImportJob.ImportJobState.PENDING_GENERATION - assert response.crypto_key_backend == "crypto_key_backend_value" + assert ( + response.algorithm + == resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION + ) + assert response.import_job == "import_job_value" + assert response.import_failure_reason == "import_failure_reason_value" + assert response.generation_failure_reason == "generation_failure_reason_value" + assert ( + response.external_destruction_failure_reason + == "external_destruction_failure_reason_value" + ) + assert response.reimport_eligible is True -def test_create_import_job_non_empty_request_with_auto_populated_field(): +def test_create_crypto_key_version_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = KeyManagementServiceClient( @@ -6725,28 +6936,26 @@ def test_create_import_job_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.CreateImportJobRequest( + request = service.CreateCryptoKeyVersionRequest( parent="parent_value", - import_job_id="import_job_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.create_import_job), "__call__" + type(client.transport.create_crypto_key_version), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_import_job(request=request) + client.create_crypto_key_version(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateImportJobRequest( + assert args[0] == service.CreateCryptoKeyVersionRequest( parent="parent_value", - import_job_id="import_job_id_value", ) -def test_create_import_job_use_cached_wrapped_rpc(): +def test_create_crypto_key_version_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6760,7 +6969,10 @@ def test_create_import_job_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_import_job in client._transport._wrapped_methods + assert ( + client._transport.create_crypto_key_version + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -6768,15 +6980,15 @@ def test_create_import_job_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_import_job + client._transport.create_crypto_key_version ] = mock_rpc request = {} - client.create_import_job(request) + client.create_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_import_job(request) + client.create_crypto_key_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6784,7 +6996,7 @@ def test_create_import_job_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_import_job_async_use_cached_wrapped_rpc( +async def test_create_crypto_key_version_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6801,7 +7013,7 @@ async def test_create_import_job_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_import_job + client._client._transport.create_crypto_key_version in client._client._transport._wrapped_methods ) @@ -6809,16 +7021,16 @@ async def test_create_import_job_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_import_job + client._client._transport.create_crypto_key_version ] = mock_rpc request = {} - await client.create_import_job(request) + await client.create_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.create_import_job(request) + await client.create_crypto_key_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6826,8 +7038,8 @@ async def test_create_import_job_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_import_job_async( - transport: str = "grpc_asyncio", request_type=service.CreateImportJobRequest +async def test_create_crypto_key_version_async( + transport: str = "grpc_asyncio", request_type=service.CreateCryptoKeyVersionRequest ): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -6840,60 +7052,74 @@ async def test_create_import_job_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_import_job), "__call__" + type(client.transport.create_crypto_key_version), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.ImportJob( + resources.CryptoKeyVersion( name="name_value", - import_method=resources.ImportJob.ImportMethod.RSA_OAEP_3072_SHA1_AES_256, + state=resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION, protection_level=resources.ProtectionLevel.SOFTWARE, - state=resources.ImportJob.ImportJobState.PENDING_GENERATION, - crypto_key_backend="crypto_key_backend_value", + algorithm=resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION, + import_job="import_job_value", + import_failure_reason="import_failure_reason_value", + generation_failure_reason="generation_failure_reason_value", + external_destruction_failure_reason="external_destruction_failure_reason_value", + reimport_eligible=True, ) ) - response = await client.create_import_job(request) + response = await client.create_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.CreateImportJobRequest() + request = service.CreateCryptoKeyVersionRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.ImportJob) + assert isinstance(response, resources.CryptoKeyVersion) assert response.name == "name_value" assert ( - response.import_method - == resources.ImportJob.ImportMethod.RSA_OAEP_3072_SHA1_AES_256 + response.state + == resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION ) assert response.protection_level == resources.ProtectionLevel.SOFTWARE - assert response.state == resources.ImportJob.ImportJobState.PENDING_GENERATION - assert response.crypto_key_backend == "crypto_key_backend_value" + assert ( + response.algorithm + == resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION + ) + assert response.import_job == "import_job_value" + assert response.import_failure_reason == "import_failure_reason_value" + assert response.generation_failure_reason == "generation_failure_reason_value" + assert ( + response.external_destruction_failure_reason + == "external_destruction_failure_reason_value" + ) + assert response.reimport_eligible is True @pytest.mark.asyncio -async def test_create_import_job_async_from_dict(): - await test_create_import_job_async(request_type=dict) +async def test_create_crypto_key_version_async_from_dict(): + await test_create_crypto_key_version_async(request_type=dict) -def test_create_import_job_field_headers(): +def test_create_crypto_key_version_field_headers(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = service.CreateImportJobRequest() + request = service.CreateCryptoKeyVersionRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_import_job), "__call__" + type(client.transport.create_crypto_key_version), "__call__" ) as call: - call.return_value = resources.ImportJob() - client.create_import_job(request) + call.return_value = resources.CryptoKeyVersion() + client.create_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -6909,23 +7135,25 @@ def test_create_import_job_field_headers(): @pytest.mark.asyncio -async def test_create_import_job_field_headers_async(): +async def test_create_crypto_key_version_field_headers_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.CreateImportJobRequest() + request = service.CreateCryptoKeyVersionRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_import_job), "__call__" + type(client.transport.create_crypto_key_version), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.ImportJob()) - await client.create_import_job(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.CryptoKeyVersion() + ) + await client.create_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -6940,23 +7168,22 @@ async def test_create_import_job_field_headers_async(): ) in kw["metadata"] -def test_create_import_job_flattened(): +def test_create_crypto_key_version_flattened(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_import_job), "__call__" + type(client.transport.create_crypto_key_version), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.ImportJob() + call.return_value = resources.CryptoKeyVersion() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_import_job( + client.create_crypto_key_version( parent="parent_value", - import_job_id="import_job_id_value", - import_job=resources.ImportJob(name="name_value"), + crypto_key_version=resources.CryptoKeyVersion(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -6966,15 +7193,12 @@ def test_create_import_job_flattened(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].import_job_id - mock_val = "import_job_id_value" - assert arg == mock_val - arg = args[0].import_job - mock_val = resources.ImportJob(name="name_value") + arg = args[0].crypto_key_version + mock_val = resources.CryptoKeyVersion(name="name_value") assert arg == mock_val -def test_create_import_job_flattened_error(): +def test_create_crypto_key_version_flattened_error(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6982,34 +7206,34 @@ def test_create_import_job_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_import_job( - service.CreateImportJobRequest(), + client.create_crypto_key_version( + service.CreateCryptoKeyVersionRequest(), parent="parent_value", - import_job_id="import_job_id_value", - import_job=resources.ImportJob(name="name_value"), + crypto_key_version=resources.CryptoKeyVersion(name="name_value"), ) @pytest.mark.asyncio -async def test_create_import_job_flattened_async(): +async def test_create_crypto_key_version_flattened_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_import_job), "__call__" + type(client.transport.create_crypto_key_version), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.ImportJob() + call.return_value = resources.CryptoKeyVersion() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.ImportJob()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.CryptoKeyVersion() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.create_import_job( + response = await client.create_crypto_key_version( parent="parent_value", - import_job_id="import_job_id_value", - import_job=resources.ImportJob(name="name_value"), + crypto_key_version=resources.CryptoKeyVersion(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -7019,16 +7243,13 @@ async def test_create_import_job_flattened_async(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].import_job_id - mock_val = "import_job_id_value" - assert arg == mock_val - arg = args[0].import_job - mock_val = resources.ImportJob(name="name_value") + arg = args[0].crypto_key_version + mock_val = resources.CryptoKeyVersion(name="name_value") assert arg == mock_val @pytest.mark.asyncio -async def test_create_import_job_flattened_error_async(): +async def test_create_crypto_key_version_flattened_error_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) @@ -7036,22 +7257,21 @@ async def test_create_import_job_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_import_job( - service.CreateImportJobRequest(), + await client.create_crypto_key_version( + service.CreateCryptoKeyVersionRequest(), parent="parent_value", - import_job_id="import_job_id_value", - import_job=resources.ImportJob(name="name_value"), + crypto_key_version=resources.CryptoKeyVersion(name="name_value"), ) @pytest.mark.parametrize( "request_type", [ - service.UpdateCryptoKeyRequest, + service.DeleteCryptoKeyRequest, dict, ], ) -def test_update_crypto_key(request_type, transport: str = "grpc"): +def test_delete_crypto_key(request_type, transport: str = "grpc"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7063,32 +7283,23 @@ def test_update_crypto_key(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_crypto_key), "__call__" + type(client.transport.delete_crypto_key), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.CryptoKey( - name="name_value", - purpose=resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT, - import_only=True, - crypto_key_backend="crypto_key_backend_value", - ) - response = client.update_crypto_key(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_crypto_key(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.UpdateCryptoKeyRequest() + request = service.DeleteCryptoKeyRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.CryptoKey) - assert response.name == "name_value" - assert response.purpose == resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT - assert response.import_only is True - assert response.crypto_key_backend == "crypto_key_backend_value" + assert isinstance(response, future.Future) -def test_update_crypto_key_non_empty_request_with_auto_populated_field(): +def test_delete_crypto_key_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = KeyManagementServiceClient( @@ -7099,22 +7310,26 @@ def test_update_crypto_key_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.UpdateCryptoKeyRequest() + request = service.DeleteCryptoKeyRequest( + name="name_value", + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_crypto_key), "__call__" + type(client.transport.delete_crypto_key), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_crypto_key(request=request) + client.delete_crypto_key(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.UpdateCryptoKeyRequest() + assert args[0] == service.DeleteCryptoKeyRequest( + name="name_value", + ) -def test_update_crypto_key_use_cached_wrapped_rpc(): +def test_delete_crypto_key_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -7128,7 +7343,7 @@ def test_update_crypto_key_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_crypto_key in client._transport._wrapped_methods + assert client._transport.delete_crypto_key in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -7136,15 +7351,20 @@ def test_update_crypto_key_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_crypto_key + client._transport.delete_crypto_key ] = mock_rpc request = {} - client.update_crypto_key(request) + client.delete_crypto_key(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_crypto_key(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_crypto_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -7152,7 +7372,7 @@ def test_update_crypto_key_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_crypto_key_async_use_cached_wrapped_rpc( +async def test_delete_crypto_key_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7169,7 +7389,7 @@ async def test_update_crypto_key_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_crypto_key + client._client._transport.delete_crypto_key in client._client._transport._wrapped_methods ) @@ -7177,16 +7397,21 @@ async def test_update_crypto_key_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_crypto_key + client._client._transport.delete_crypto_key ] = mock_rpc request = {} - await client.update_crypto_key(request) + await client.delete_crypto_key(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.update_crypto_key(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_crypto_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -7194,8 +7419,8 @@ async def test_update_crypto_key_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_update_crypto_key_async( - transport: str = "grpc_asyncio", request_type=service.UpdateCryptoKeyRequest +async def test_delete_crypto_key_async( + transport: str = "grpc_asyncio", request_type=service.DeleteCryptoKeyRequest ): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -7208,55 +7433,46 @@ async def test_update_crypto_key_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_crypto_key), "__call__" + type(client.transport.delete_crypto_key), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CryptoKey( - name="name_value", - purpose=resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT, - import_only=True, - crypto_key_backend="crypto_key_backend_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.update_crypto_key(request) + response = await client.delete_crypto_key(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.UpdateCryptoKeyRequest() + request = service.DeleteCryptoKeyRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.CryptoKey) - assert response.name == "name_value" - assert response.purpose == resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT - assert response.import_only is True - assert response.crypto_key_backend == "crypto_key_backend_value" + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_update_crypto_key_async_from_dict(): - await test_update_crypto_key_async(request_type=dict) +async def test_delete_crypto_key_async_from_dict(): + await test_delete_crypto_key_async(request_type=dict) -def test_update_crypto_key_field_headers(): +def test_delete_crypto_key_field_headers(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.UpdateCryptoKeyRequest() + request = service.DeleteCryptoKeyRequest() - request.crypto_key.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_crypto_key), "__call__" + type(client.transport.delete_crypto_key), "__call__" ) as call: - call.return_value = resources.CryptoKey() - client.update_crypto_key(request) + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_crypto_key(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -7267,28 +7483,30 @@ def test_update_crypto_key_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "crypto_key.name=name_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_crypto_key_field_headers_async(): +async def test_delete_crypto_key_field_headers_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.UpdateCryptoKeyRequest() + request = service.DeleteCryptoKeyRequest() - request.crypto_key.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_crypto_key), "__call__" + type(client.transport.delete_crypto_key), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.CryptoKey()) - await client.update_crypto_key(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_crypto_key(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -7299,41 +7517,37 @@ async def test_update_crypto_key_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "crypto_key.name=name_value", + "name=name_value", ) in kw["metadata"] -def test_update_crypto_key_flattened(): +def test_delete_crypto_key_flattened(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_crypto_key), "__call__" + type(client.transport.delete_crypto_key), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.CryptoKey() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_crypto_key( - crypto_key=resources.CryptoKey(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_crypto_key( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].crypto_key - mock_val = resources.CryptoKey(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_update_crypto_key_flattened_error(): +def test_delete_crypto_key_flattened_error(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -7341,48 +7555,45 @@ def test_update_crypto_key_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_crypto_key( - service.UpdateCryptoKeyRequest(), - crypto_key=resources.CryptoKey(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_crypto_key( + service.DeleteCryptoKeyRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_update_crypto_key_flattened_async(): +async def test_delete_crypto_key_flattened_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_crypto_key), "__call__" + type(client.transport.delete_crypto_key), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.CryptoKey() + call.return_value = operations_pb2.Operation(name="operations/op") - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.CryptoKey()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_crypto_key( - crypto_key=resources.CryptoKey(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.delete_crypto_key( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].crypto_key - mock_val = resources.CryptoKey(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_crypto_key_flattened_error_async(): +async def test_delete_crypto_key_flattened_error_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) @@ -7390,21 +7601,20 @@ async def test_update_crypto_key_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.update_crypto_key( - service.UpdateCryptoKeyRequest(), - crypto_key=resources.CryptoKey(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.delete_crypto_key( + service.DeleteCryptoKeyRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - service.UpdateCryptoKeyVersionRequest, + service.DeleteCryptoKeyVersionRequest, dict, ], ) -def test_update_crypto_key_version(request_type, transport: str = "grpc"): +def test_delete_crypto_key_version(request_type, transport: str = "grpc"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7416,51 +7626,23 @@ def test_update_crypto_key_version(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_crypto_key_version), "__call__" + type(client.transport.delete_crypto_key_version), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.CryptoKeyVersion( - name="name_value", - state=resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION, - protection_level=resources.ProtectionLevel.SOFTWARE, - algorithm=resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION, - import_job="import_job_value", - import_failure_reason="import_failure_reason_value", - generation_failure_reason="generation_failure_reason_value", - external_destruction_failure_reason="external_destruction_failure_reason_value", - reimport_eligible=True, - ) - response = client.update_crypto_key_version(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.UpdateCryptoKeyVersionRequest() + request = service.DeleteCryptoKeyVersionRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.CryptoKeyVersion) - assert response.name == "name_value" - assert ( - response.state - == resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION - ) - assert response.protection_level == resources.ProtectionLevel.SOFTWARE - assert ( - response.algorithm - == resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION - ) - assert response.import_job == "import_job_value" - assert response.import_failure_reason == "import_failure_reason_value" - assert response.generation_failure_reason == "generation_failure_reason_value" - assert ( - response.external_destruction_failure_reason - == "external_destruction_failure_reason_value" - ) - assert response.reimport_eligible is True + assert isinstance(response, future.Future) -def test_update_crypto_key_version_non_empty_request_with_auto_populated_field(): +def test_delete_crypto_key_version_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = KeyManagementServiceClient( @@ -7471,22 +7653,26 @@ def test_update_crypto_key_version_non_empty_request_with_auto_populated_field() # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.UpdateCryptoKeyVersionRequest() + request = service.DeleteCryptoKeyVersionRequest( + name="name_value", + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_crypto_key_version), "__call__" + type(client.transport.delete_crypto_key_version), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_crypto_key_version(request=request) + client.delete_crypto_key_version(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.UpdateCryptoKeyVersionRequest() + assert args[0] == service.DeleteCryptoKeyVersionRequest( + name="name_value", + ) -def test_update_crypto_key_version_use_cached_wrapped_rpc(): +def test_delete_crypto_key_version_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -7501,7 +7687,7 @@ def test_update_crypto_key_version_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_crypto_key_version + client._transport.delete_crypto_key_version in client._transport._wrapped_methods ) @@ -7511,15 +7697,20 @@ def test_update_crypto_key_version_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_crypto_key_version + client._transport.delete_crypto_key_version ] = mock_rpc request = {} - client.update_crypto_key_version(request) + client.delete_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_crypto_key_version(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_crypto_key_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -7527,7 +7718,7 @@ def test_update_crypto_key_version_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_crypto_key_version_async_use_cached_wrapped_rpc( +async def test_delete_crypto_key_version_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7544,7 +7735,7 @@ async def test_update_crypto_key_version_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_crypto_key_version + client._client._transport.delete_crypto_key_version in client._client._transport._wrapped_methods ) @@ -7552,16 +7743,21 @@ async def test_update_crypto_key_version_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_crypto_key_version + client._client._transport.delete_crypto_key_version ] = mock_rpc request = {} - await client.update_crypto_key_version(request) + await client.delete_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.update_crypto_key_version(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_crypto_key_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -7569,8 +7765,8 @@ async def test_update_crypto_key_version_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_update_crypto_key_version_async( - transport: str = "grpc_asyncio", request_type=service.UpdateCryptoKeyVersionRequest +async def test_delete_crypto_key_version_async( + transport: str = "grpc_asyncio", request_type=service.DeleteCryptoKeyVersionRequest ): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -7583,74 +7779,46 @@ async def test_update_crypto_key_version_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_crypto_key_version), "__call__" + type(client.transport.delete_crypto_key_version), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CryptoKeyVersion( - name="name_value", - state=resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION, - protection_level=resources.ProtectionLevel.SOFTWARE, - algorithm=resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION, - import_job="import_job_value", - import_failure_reason="import_failure_reason_value", - generation_failure_reason="generation_failure_reason_value", - external_destruction_failure_reason="external_destruction_failure_reason_value", - reimport_eligible=True, - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.update_crypto_key_version(request) + response = await client.delete_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.UpdateCryptoKeyVersionRequest() + request = service.DeleteCryptoKeyVersionRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.CryptoKeyVersion) - assert response.name == "name_value" - assert ( - response.state - == resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION - ) - assert response.protection_level == resources.ProtectionLevel.SOFTWARE - assert ( - response.algorithm - == resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION - ) - assert response.import_job == "import_job_value" - assert response.import_failure_reason == "import_failure_reason_value" - assert response.generation_failure_reason == "generation_failure_reason_value" - assert ( - response.external_destruction_failure_reason - == "external_destruction_failure_reason_value" - ) - assert response.reimport_eligible is True + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_update_crypto_key_version_async_from_dict(): - await test_update_crypto_key_version_async(request_type=dict) +async def test_delete_crypto_key_version_async_from_dict(): + await test_delete_crypto_key_version_async(request_type=dict) -def test_update_crypto_key_version_field_headers(): +def test_delete_crypto_key_version_field_headers(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.UpdateCryptoKeyVersionRequest() + request = service.DeleteCryptoKeyVersionRequest() - request.crypto_key_version.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_crypto_key_version), "__call__" + type(client.transport.delete_crypto_key_version), "__call__" ) as call: - call.return_value = resources.CryptoKeyVersion() - client.update_crypto_key_version(request) + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -7661,30 +7829,30 @@ def test_update_crypto_key_version_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "crypto_key_version.name=name_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_crypto_key_version_field_headers_async(): +async def test_delete_crypto_key_version_field_headers_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.UpdateCryptoKeyVersionRequest() + request = service.DeleteCryptoKeyVersionRequest() - request.crypto_key_version.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_crypto_key_version), "__call__" + type(client.transport.delete_crypto_key_version), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CryptoKeyVersion() + operations_pb2.Operation(name="operations/op") ) - await client.update_crypto_key_version(request) + await client.delete_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -7695,41 +7863,37 @@ async def test_update_crypto_key_version_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "crypto_key_version.name=name_value", + "name=name_value", ) in kw["metadata"] -def test_update_crypto_key_version_flattened(): +def test_delete_crypto_key_version_flattened(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_crypto_key_version), "__call__" + type(client.transport.delete_crypto_key_version), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.CryptoKeyVersion() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_crypto_key_version( - crypto_key_version=resources.CryptoKeyVersion(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_crypto_key_version( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].crypto_key_version - mock_val = resources.CryptoKeyVersion(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_update_crypto_key_version_flattened_error(): +def test_delete_crypto_key_version_flattened_error(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -7737,50 +7901,45 @@ def test_update_crypto_key_version_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_crypto_key_version( - service.UpdateCryptoKeyVersionRequest(), - crypto_key_version=resources.CryptoKeyVersion(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_crypto_key_version( + service.DeleteCryptoKeyVersionRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_update_crypto_key_version_flattened_async(): +async def test_delete_crypto_key_version_flattened_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_crypto_key_version), "__call__" + type(client.transport.delete_crypto_key_version), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.CryptoKeyVersion() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CryptoKeyVersion() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_crypto_key_version( - crypto_key_version=resources.CryptoKeyVersion(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.delete_crypto_key_version( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].crypto_key_version - mock_val = resources.CryptoKeyVersion(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_crypto_key_version_flattened_error_async(): +async def test_delete_crypto_key_version_flattened_error_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) @@ -7788,21 +7947,20 @@ async def test_update_crypto_key_version_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_crypto_key_version( - service.UpdateCryptoKeyVersionRequest(), - crypto_key_version=resources.CryptoKeyVersion(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.delete_crypto_key_version( + service.DeleteCryptoKeyVersionRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - service.UpdateCryptoKeyPrimaryVersionRequest, + service.ImportCryptoKeyVersionRequest, dict, ], ) -def test_update_crypto_key_primary_version(request_type, transport: str = "grpc"): +def test_import_crypto_key_version(request_type, transport: str = "grpc"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7814,32 +7972,51 @@ def test_update_crypto_key_primary_version(request_type, transport: str = "grpc" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_crypto_key_primary_version), "__call__" + type(client.transport.import_crypto_key_version), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.CryptoKey( + call.return_value = resources.CryptoKeyVersion( name="name_value", - purpose=resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT, - import_only=True, - crypto_key_backend="crypto_key_backend_value", + state=resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION, + protection_level=resources.ProtectionLevel.SOFTWARE, + algorithm=resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION, + import_job="import_job_value", + import_failure_reason="import_failure_reason_value", + generation_failure_reason="generation_failure_reason_value", + external_destruction_failure_reason="external_destruction_failure_reason_value", + reimport_eligible=True, ) - response = client.update_crypto_key_primary_version(request) + response = client.import_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.UpdateCryptoKeyPrimaryVersionRequest() + request = service.ImportCryptoKeyVersionRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.CryptoKey) + assert isinstance(response, resources.CryptoKeyVersion) assert response.name == "name_value" - assert response.purpose == resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT - assert response.import_only is True - assert response.crypto_key_backend == "crypto_key_backend_value" + assert ( + response.state + == resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION + ) + assert response.protection_level == resources.ProtectionLevel.SOFTWARE + assert ( + response.algorithm + == resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION + ) + assert response.import_job == "import_job_value" + assert response.import_failure_reason == "import_failure_reason_value" + assert response.generation_failure_reason == "generation_failure_reason_value" + assert ( + response.external_destruction_failure_reason + == "external_destruction_failure_reason_value" + ) + assert response.reimport_eligible is True -def test_update_crypto_key_primary_version_non_empty_request_with_auto_populated_field(): +def test_import_crypto_key_version_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = KeyManagementServiceClient( @@ -7850,28 +8027,30 @@ def test_update_crypto_key_primary_version_non_empty_request_with_auto_populated # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.UpdateCryptoKeyPrimaryVersionRequest( - name="name_value", - crypto_key_version_id="crypto_key_version_id_value", + request = service.ImportCryptoKeyVersionRequest( + parent="parent_value", + crypto_key_version="crypto_key_version_value", + import_job="import_job_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_crypto_key_primary_version), "__call__" + type(client.transport.import_crypto_key_version), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_crypto_key_primary_version(request=request) + client.import_crypto_key_version(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.UpdateCryptoKeyPrimaryVersionRequest( - name="name_value", - crypto_key_version_id="crypto_key_version_id_value", + assert args[0] == service.ImportCryptoKeyVersionRequest( + parent="parent_value", + crypto_key_version="crypto_key_version_value", + import_job="import_job_value", ) -def test_update_crypto_key_primary_version_use_cached_wrapped_rpc(): +def test_import_crypto_key_version_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -7886,7 +8065,7 @@ def test_update_crypto_key_primary_version_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_crypto_key_primary_version + client._transport.import_crypto_key_version in client._transport._wrapped_methods ) @@ -7896,15 +8075,15 @@ def test_update_crypto_key_primary_version_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_crypto_key_primary_version + client._transport.import_crypto_key_version ] = mock_rpc request = {} - client.update_crypto_key_primary_version(request) + client.import_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_crypto_key_primary_version(request) + client.import_crypto_key_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -7912,7 +8091,7 @@ def test_update_crypto_key_primary_version_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_crypto_key_primary_version_async_use_cached_wrapped_rpc( +async def test_import_crypto_key_version_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7929,7 +8108,7 @@ async def test_update_crypto_key_primary_version_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_crypto_key_primary_version + client._client._transport.import_crypto_key_version in client._client._transport._wrapped_methods ) @@ -7937,16 +8116,16 @@ async def test_update_crypto_key_primary_version_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_crypto_key_primary_version + client._client._transport.import_crypto_key_version ] = mock_rpc request = {} - await client.update_crypto_key_primary_version(request) + await client.import_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.update_crypto_key_primary_version(request) + await client.import_crypto_key_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -7954,9 +8133,8 @@ async def test_update_crypto_key_primary_version_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_update_crypto_key_primary_version_async( - transport: str = "grpc_asyncio", - request_type=service.UpdateCryptoKeyPrimaryVersionRequest, +async def test_import_crypto_key_version_async( + transport: str = "grpc_asyncio", request_type=service.ImportCryptoKeyVersionRequest ): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -7969,55 +8147,74 @@ async def test_update_crypto_key_primary_version_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_crypto_key_primary_version), "__call__" + type(client.transport.import_crypto_key_version), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CryptoKey( + resources.CryptoKeyVersion( name="name_value", - purpose=resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT, - import_only=True, - crypto_key_backend="crypto_key_backend_value", + state=resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION, + protection_level=resources.ProtectionLevel.SOFTWARE, + algorithm=resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION, + import_job="import_job_value", + import_failure_reason="import_failure_reason_value", + generation_failure_reason="generation_failure_reason_value", + external_destruction_failure_reason="external_destruction_failure_reason_value", + reimport_eligible=True, ) ) - response = await client.update_crypto_key_primary_version(request) + response = await client.import_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.UpdateCryptoKeyPrimaryVersionRequest() + request = service.ImportCryptoKeyVersionRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.CryptoKey) + assert isinstance(response, resources.CryptoKeyVersion) assert response.name == "name_value" - assert response.purpose == resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT - assert response.import_only is True - assert response.crypto_key_backend == "crypto_key_backend_value" + assert ( + response.state + == resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION + ) + assert response.protection_level == resources.ProtectionLevel.SOFTWARE + assert ( + response.algorithm + == resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION + ) + assert response.import_job == "import_job_value" + assert response.import_failure_reason == "import_failure_reason_value" + assert response.generation_failure_reason == "generation_failure_reason_value" + assert ( + response.external_destruction_failure_reason + == "external_destruction_failure_reason_value" + ) + assert response.reimport_eligible is True @pytest.mark.asyncio -async def test_update_crypto_key_primary_version_async_from_dict(): - await test_update_crypto_key_primary_version_async(request_type=dict) +async def test_import_crypto_key_version_async_from_dict(): + await test_import_crypto_key_version_async(request_type=dict) -def test_update_crypto_key_primary_version_field_headers(): +def test_import_crypto_key_version_field_headers(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.UpdateCryptoKeyPrimaryVersionRequest() + request = service.ImportCryptoKeyVersionRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_crypto_key_primary_version), "__call__" + type(client.transport.import_crypto_key_version), "__call__" ) as call: - call.return_value = resources.CryptoKey() - client.update_crypto_key_primary_version(request) + call.return_value = resources.CryptoKeyVersion() + client.import_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -8028,28 +8225,30 @@ def test_update_crypto_key_primary_version_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_crypto_key_primary_version_field_headers_async(): +async def test_import_crypto_key_version_field_headers_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.UpdateCryptoKeyPrimaryVersionRequest() + request = service.ImportCryptoKeyVersionRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_crypto_key_primary_version), "__call__" + type(client.transport.import_crypto_key_version), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.CryptoKey()) - await client.update_crypto_key_primary_version(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.CryptoKeyVersion() + ) + await client.import_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -8060,112 +8259,18 @@ async def test_update_crypto_key_primary_version_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_update_crypto_key_primary_version_flattened(): - client = KeyManagementServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_crypto_key_primary_version), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = resources.CryptoKey() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_crypto_key_primary_version( - name="name_value", - crypto_key_version_id="crypto_key_version_id_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - arg = args[0].crypto_key_version_id - mock_val = "crypto_key_version_id_value" - assert arg == mock_val - - -def test_update_crypto_key_primary_version_flattened_error(): - client = KeyManagementServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_crypto_key_primary_version( - service.UpdateCryptoKeyPrimaryVersionRequest(), - name="name_value", - crypto_key_version_id="crypto_key_version_id_value", - ) - - -@pytest.mark.asyncio -async def test_update_crypto_key_primary_version_flattened_async(): - client = KeyManagementServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_crypto_key_primary_version), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = resources.CryptoKey() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.CryptoKey()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_crypto_key_primary_version( - name="name_value", - crypto_key_version_id="crypto_key_version_id_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - arg = args[0].crypto_key_version_id - mock_val = "crypto_key_version_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_crypto_key_primary_version_flattened_error_async(): - client = KeyManagementServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_crypto_key_primary_version( - service.UpdateCryptoKeyPrimaryVersionRequest(), - name="name_value", - crypto_key_version_id="crypto_key_version_id_value", - ) - - @pytest.mark.parametrize( "request_type", [ - service.DestroyCryptoKeyVersionRequest, + service.CreateImportJobRequest, dict, ], ) -def test_destroy_crypto_key_version(request_type, transport: str = "grpc"): +def test_create_import_job(request_type, transport: str = "grpc"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8177,51 +8282,37 @@ def test_destroy_crypto_key_version(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.destroy_crypto_key_version), "__call__" + type(client.transport.create_import_job), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.CryptoKeyVersion( + call.return_value = resources.ImportJob( name="name_value", - state=resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION, + import_method=resources.ImportJob.ImportMethod.RSA_OAEP_3072_SHA1_AES_256, protection_level=resources.ProtectionLevel.SOFTWARE, - algorithm=resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION, - import_job="import_job_value", - import_failure_reason="import_failure_reason_value", - generation_failure_reason="generation_failure_reason_value", - external_destruction_failure_reason="external_destruction_failure_reason_value", - reimport_eligible=True, + state=resources.ImportJob.ImportJobState.PENDING_GENERATION, + crypto_key_backend="crypto_key_backend_value", ) - response = client.destroy_crypto_key_version(request) + response = client.create_import_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.DestroyCryptoKeyVersionRequest() + request = service.CreateImportJobRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.CryptoKeyVersion) + assert isinstance(response, resources.ImportJob) assert response.name == "name_value" assert ( - response.state - == resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION + response.import_method + == resources.ImportJob.ImportMethod.RSA_OAEP_3072_SHA1_AES_256 ) assert response.protection_level == resources.ProtectionLevel.SOFTWARE - assert ( - response.algorithm - == resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION - ) - assert response.import_job == "import_job_value" - assert response.import_failure_reason == "import_failure_reason_value" - assert response.generation_failure_reason == "generation_failure_reason_value" - assert ( - response.external_destruction_failure_reason - == "external_destruction_failure_reason_value" - ) - assert response.reimport_eligible is True + assert response.state == resources.ImportJob.ImportJobState.PENDING_GENERATION + assert response.crypto_key_backend == "crypto_key_backend_value" -def test_destroy_crypto_key_version_non_empty_request_with_auto_populated_field(): +def test_create_import_job_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = KeyManagementServiceClient( @@ -8232,26 +8323,28 @@ def test_destroy_crypto_key_version_non_empty_request_with_auto_populated_field( # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.DestroyCryptoKeyVersionRequest( - name="name_value", + request = service.CreateImportJobRequest( + parent="parent_value", + import_job_id="import_job_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.destroy_crypto_key_version), "__call__" + type(client.transport.create_import_job), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.destroy_crypto_key_version(request=request) + client.create_import_job(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.DestroyCryptoKeyVersionRequest( - name="name_value", - ) + assert args[0] == service.CreateImportJobRequest( + parent="parent_value", + import_job_id="import_job_id_value", + ) -def test_destroy_crypto_key_version_use_cached_wrapped_rpc(): +def test_create_import_job_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -8265,10 +8358,7 @@ def test_destroy_crypto_key_version_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.destroy_crypto_key_version - in client._transport._wrapped_methods - ) + assert client._transport.create_import_job in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -8276,15 +8366,15 @@ def test_destroy_crypto_key_version_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.destroy_crypto_key_version + client._transport.create_import_job ] = mock_rpc request = {} - client.destroy_crypto_key_version(request) + client.create_import_job(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.destroy_crypto_key_version(request) + client.create_import_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -8292,7 +8382,7 @@ def test_destroy_crypto_key_version_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_destroy_crypto_key_version_async_use_cached_wrapped_rpc( +async def test_create_import_job_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -8309,7 +8399,7 @@ async def test_destroy_crypto_key_version_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.destroy_crypto_key_version + client._client._transport.create_import_job in client._client._transport._wrapped_methods ) @@ -8317,16 +8407,16 @@ async def test_destroy_crypto_key_version_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.destroy_crypto_key_version + client._client._transport.create_import_job ] = mock_rpc request = {} - await client.destroy_crypto_key_version(request) + await client.create_import_job(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.destroy_crypto_key_version(request) + await client.create_import_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -8334,8 +8424,8 @@ async def test_destroy_crypto_key_version_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_destroy_crypto_key_version_async( - transport: str = "grpc_asyncio", request_type=service.DestroyCryptoKeyVersionRequest +async def test_create_import_job_async( + transport: str = "grpc_asyncio", request_type=service.CreateImportJobRequest ): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -8348,74 +8438,60 @@ async def test_destroy_crypto_key_version_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.destroy_crypto_key_version), "__call__" + type(client.transport.create_import_job), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CryptoKeyVersion( + resources.ImportJob( name="name_value", - state=resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION, + import_method=resources.ImportJob.ImportMethod.RSA_OAEP_3072_SHA1_AES_256, protection_level=resources.ProtectionLevel.SOFTWARE, - algorithm=resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION, - import_job="import_job_value", - import_failure_reason="import_failure_reason_value", - generation_failure_reason="generation_failure_reason_value", - external_destruction_failure_reason="external_destruction_failure_reason_value", - reimport_eligible=True, + state=resources.ImportJob.ImportJobState.PENDING_GENERATION, + crypto_key_backend="crypto_key_backend_value", ) ) - response = await client.destroy_crypto_key_version(request) + response = await client.create_import_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.DestroyCryptoKeyVersionRequest() + request = service.CreateImportJobRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.CryptoKeyVersion) + assert isinstance(response, resources.ImportJob) assert response.name == "name_value" assert ( - response.state - == resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION + response.import_method + == resources.ImportJob.ImportMethod.RSA_OAEP_3072_SHA1_AES_256 ) assert response.protection_level == resources.ProtectionLevel.SOFTWARE - assert ( - response.algorithm - == resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION - ) - assert response.import_job == "import_job_value" - assert response.import_failure_reason == "import_failure_reason_value" - assert response.generation_failure_reason == "generation_failure_reason_value" - assert ( - response.external_destruction_failure_reason - == "external_destruction_failure_reason_value" - ) - assert response.reimport_eligible is True + assert response.state == resources.ImportJob.ImportJobState.PENDING_GENERATION + assert response.crypto_key_backend == "crypto_key_backend_value" @pytest.mark.asyncio -async def test_destroy_crypto_key_version_async_from_dict(): - await test_destroy_crypto_key_version_async(request_type=dict) +async def test_create_import_job_async_from_dict(): + await test_create_import_job_async(request_type=dict) -def test_destroy_crypto_key_version_field_headers(): +def test_create_import_job_field_headers(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.DestroyCryptoKeyVersionRequest() + request = service.CreateImportJobRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.destroy_crypto_key_version), "__call__" + type(client.transport.create_import_job), "__call__" ) as call: - call.return_value = resources.CryptoKeyVersion() - client.destroy_crypto_key_version(request) + call.return_value = resources.ImportJob() + client.create_import_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -8426,30 +8502,28 @@ def test_destroy_crypto_key_version_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_destroy_crypto_key_version_field_headers_async(): +async def test_create_import_job_field_headers_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.DestroyCryptoKeyVersionRequest() + request = service.CreateImportJobRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.destroy_crypto_key_version), "__call__" + type(client.transport.create_import_job), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CryptoKeyVersion() - ) - await client.destroy_crypto_key_version(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.ImportJob()) + await client.create_import_job(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -8460,37 +8534,45 @@ async def test_destroy_crypto_key_version_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_destroy_crypto_key_version_flattened(): +def test_create_import_job_flattened(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.destroy_crypto_key_version), "__call__" + type(client.transport.create_import_job), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.CryptoKeyVersion() + call.return_value = resources.ImportJob() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.destroy_crypto_key_version( - name="name_value", + client.create_import_job( + parent="parent_value", + import_job_id="import_job_id_value", + import_job=resources.ImportJob(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].import_job_id + mock_val = "import_job_id_value" + assert arg == mock_val + arg = args[0].import_job + mock_val = resources.ImportJob(name="name_value") assert arg == mock_val -def test_destroy_crypto_key_version_flattened_error(): +def test_create_import_job_flattened_error(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -8498,45 +8580,53 @@ def test_destroy_crypto_key_version_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.destroy_crypto_key_version( - service.DestroyCryptoKeyVersionRequest(), - name="name_value", + client.create_import_job( + service.CreateImportJobRequest(), + parent="parent_value", + import_job_id="import_job_id_value", + import_job=resources.ImportJob(name="name_value"), ) @pytest.mark.asyncio -async def test_destroy_crypto_key_version_flattened_async(): +async def test_create_import_job_flattened_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.destroy_crypto_key_version), "__call__" + type(client.transport.create_import_job), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.CryptoKeyVersion() + call.return_value = resources.ImportJob() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CryptoKeyVersion() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.ImportJob()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.destroy_crypto_key_version( - name="name_value", + response = await client.create_import_job( + parent="parent_value", + import_job_id="import_job_id_value", + import_job=resources.ImportJob(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].import_job_id + mock_val = "import_job_id_value" + assert arg == mock_val + arg = args[0].import_job + mock_val = resources.ImportJob(name="name_value") assert arg == mock_val @pytest.mark.asyncio -async def test_destroy_crypto_key_version_flattened_error_async(): +async def test_create_import_job_flattened_error_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) @@ -8544,20 +8634,22 @@ async def test_destroy_crypto_key_version_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.destroy_crypto_key_version( - service.DestroyCryptoKeyVersionRequest(), - name="name_value", + await client.create_import_job( + service.CreateImportJobRequest(), + parent="parent_value", + import_job_id="import_job_id_value", + import_job=resources.ImportJob(name="name_value"), ) @pytest.mark.parametrize( "request_type", [ - service.RestoreCryptoKeyVersionRequest, + service.UpdateCryptoKeyRequest, dict, ], ) -def test_restore_crypto_key_version(request_type, transport: str = "grpc"): +def test_update_crypto_key(request_type, transport: str = "grpc"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8569,51 +8661,32 @@ def test_restore_crypto_key_version(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.restore_crypto_key_version), "__call__" + type(client.transport.update_crypto_key), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.CryptoKeyVersion( + call.return_value = resources.CryptoKey( name="name_value", - state=resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION, - protection_level=resources.ProtectionLevel.SOFTWARE, - algorithm=resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION, - import_job="import_job_value", - import_failure_reason="import_failure_reason_value", - generation_failure_reason="generation_failure_reason_value", - external_destruction_failure_reason="external_destruction_failure_reason_value", - reimport_eligible=True, + purpose=resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT, + import_only=True, + crypto_key_backend="crypto_key_backend_value", ) - response = client.restore_crypto_key_version(request) + response = client.update_crypto_key(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.RestoreCryptoKeyVersionRequest() + request = service.UpdateCryptoKeyRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.CryptoKeyVersion) + assert isinstance(response, resources.CryptoKey) assert response.name == "name_value" - assert ( - response.state - == resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION - ) - assert response.protection_level == resources.ProtectionLevel.SOFTWARE - assert ( - response.algorithm - == resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION - ) - assert response.import_job == "import_job_value" - assert response.import_failure_reason == "import_failure_reason_value" - assert response.generation_failure_reason == "generation_failure_reason_value" - assert ( - response.external_destruction_failure_reason - == "external_destruction_failure_reason_value" - ) - assert response.reimport_eligible is True + assert response.purpose == resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT + assert response.import_only is True + assert response.crypto_key_backend == "crypto_key_backend_value" -def test_restore_crypto_key_version_non_empty_request_with_auto_populated_field(): +def test_update_crypto_key_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = KeyManagementServiceClient( @@ -8624,26 +8697,22 @@ def test_restore_crypto_key_version_non_empty_request_with_auto_populated_field( # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.RestoreCryptoKeyVersionRequest( - name="name_value", - ) + request = service.UpdateCryptoKeyRequest() # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.restore_crypto_key_version), "__call__" + type(client.transport.update_crypto_key), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.restore_crypto_key_version(request=request) + client.update_crypto_key(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.RestoreCryptoKeyVersionRequest( - name="name_value", - ) + assert args[0] == service.UpdateCryptoKeyRequest() -def test_restore_crypto_key_version_use_cached_wrapped_rpc(): +def test_update_crypto_key_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -8657,10 +8726,7 @@ def test_restore_crypto_key_version_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.restore_crypto_key_version - in client._transport._wrapped_methods - ) + assert client._transport.update_crypto_key in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -8668,15 +8734,15 @@ def test_restore_crypto_key_version_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.restore_crypto_key_version + client._transport.update_crypto_key ] = mock_rpc request = {} - client.restore_crypto_key_version(request) + client.update_crypto_key(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.restore_crypto_key_version(request) + client.update_crypto_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -8684,7 +8750,7 @@ def test_restore_crypto_key_version_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_restore_crypto_key_version_async_use_cached_wrapped_rpc( +async def test_update_crypto_key_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -8701,7 +8767,7 @@ async def test_restore_crypto_key_version_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.restore_crypto_key_version + client._client._transport.update_crypto_key in client._client._transport._wrapped_methods ) @@ -8709,16 +8775,16 @@ async def test_restore_crypto_key_version_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.restore_crypto_key_version + client._client._transport.update_crypto_key ] = mock_rpc request = {} - await client.restore_crypto_key_version(request) + await client.update_crypto_key(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.restore_crypto_key_version(request) + await client.update_crypto_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -8726,9 +8792,9 @@ async def test_restore_crypto_key_version_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_restore_crypto_key_version_async( - transport: str = "grpc_asyncio", request_type=service.RestoreCryptoKeyVersionRequest -): +async def test_update_crypto_key_async( + transport: str = "grpc_asyncio", request_type=service.UpdateCryptoKeyRequest +): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -8740,74 +8806,55 @@ async def test_restore_crypto_key_version_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.restore_crypto_key_version), "__call__" + type(client.transport.update_crypto_key), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CryptoKeyVersion( + resources.CryptoKey( name="name_value", - state=resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION, - protection_level=resources.ProtectionLevel.SOFTWARE, - algorithm=resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION, - import_job="import_job_value", - import_failure_reason="import_failure_reason_value", - generation_failure_reason="generation_failure_reason_value", - external_destruction_failure_reason="external_destruction_failure_reason_value", - reimport_eligible=True, + purpose=resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT, + import_only=True, + crypto_key_backend="crypto_key_backend_value", ) ) - response = await client.restore_crypto_key_version(request) + response = await client.update_crypto_key(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.RestoreCryptoKeyVersionRequest() + request = service.UpdateCryptoKeyRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.CryptoKeyVersion) + assert isinstance(response, resources.CryptoKey) assert response.name == "name_value" - assert ( - response.state - == resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION - ) - assert response.protection_level == resources.ProtectionLevel.SOFTWARE - assert ( - response.algorithm - == resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION - ) - assert response.import_job == "import_job_value" - assert response.import_failure_reason == "import_failure_reason_value" - assert response.generation_failure_reason == "generation_failure_reason_value" - assert ( - response.external_destruction_failure_reason - == "external_destruction_failure_reason_value" - ) - assert response.reimport_eligible is True + assert response.purpose == resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT + assert response.import_only is True + assert response.crypto_key_backend == "crypto_key_backend_value" @pytest.mark.asyncio -async def test_restore_crypto_key_version_async_from_dict(): - await test_restore_crypto_key_version_async(request_type=dict) +async def test_update_crypto_key_async_from_dict(): + await test_update_crypto_key_async(request_type=dict) -def test_restore_crypto_key_version_field_headers(): +def test_update_crypto_key_field_headers(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = service.RestoreCryptoKeyVersionRequest() + request = service.UpdateCryptoKeyRequest() - request.name = "name_value" + request.crypto_key.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.restore_crypto_key_version), "__call__" + type(client.transport.update_crypto_key), "__call__" ) as call: - call.return_value = resources.CryptoKeyVersion() - client.restore_crypto_key_version(request) + call.return_value = resources.CryptoKey() + client.update_crypto_key(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -8818,30 +8865,28 @@ def test_restore_crypto_key_version_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "crypto_key.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_restore_crypto_key_version_field_headers_async(): +async def test_update_crypto_key_field_headers_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.RestoreCryptoKeyVersionRequest() + request = service.UpdateCryptoKeyRequest() - request.name = "name_value" + request.crypto_key.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.restore_crypto_key_version), "__call__" + type(client.transport.update_crypto_key), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CryptoKeyVersion() - ) - await client.restore_crypto_key_version(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.CryptoKey()) + await client.update_crypto_key(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -8852,37 +8897,41 @@ async def test_restore_crypto_key_version_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "crypto_key.name=name_value", ) in kw["metadata"] -def test_restore_crypto_key_version_flattened(): +def test_update_crypto_key_flattened(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.restore_crypto_key_version), "__call__" + type(client.transport.update_crypto_key), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.CryptoKeyVersion() + call.return_value = resources.CryptoKey() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.restore_crypto_key_version( - name="name_value", + client.update_crypto_key( + crypto_key=resources.CryptoKey(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].crypto_key + mock_val = resources.CryptoKey(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_restore_crypto_key_version_flattened_error(): +def test_update_crypto_key_flattened_error(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -8890,45 +8939,48 @@ def test_restore_crypto_key_version_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.restore_crypto_key_version( - service.RestoreCryptoKeyVersionRequest(), - name="name_value", + client.update_crypto_key( + service.UpdateCryptoKeyRequest(), + crypto_key=resources.CryptoKey(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_restore_crypto_key_version_flattened_async(): +async def test_update_crypto_key_flattened_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.restore_crypto_key_version), "__call__" + type(client.transport.update_crypto_key), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.CryptoKeyVersion() + call.return_value = resources.CryptoKey() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.CryptoKeyVersion() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.CryptoKey()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.restore_crypto_key_version( - name="name_value", + response = await client.update_crypto_key( + crypto_key=resources.CryptoKey(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].crypto_key + mock_val = resources.CryptoKey(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_restore_crypto_key_version_flattened_error_async(): +async def test_update_crypto_key_flattened_error_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) @@ -8936,20 +8988,21 @@ async def test_restore_crypto_key_version_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.restore_crypto_key_version( - service.RestoreCryptoKeyVersionRequest(), - name="name_value", + await client.update_crypto_key( + service.UpdateCryptoKeyRequest(), + crypto_key=resources.CryptoKey(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - service.EncryptRequest, + service.UpdateCryptoKeyVersionRequest, dict, ], ) -def test_encrypt(request_type, transport: str = "grpc"): +def test_update_crypto_key_version(request_type, transport: str = "grpc"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8960,33 +9013,52 @@ def test_encrypt(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.encrypt), "__call__") as call: + with mock.patch.object( + type(client.transport.update_crypto_key_version), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = service.EncryptResponse( + call.return_value = resources.CryptoKeyVersion( name="name_value", - ciphertext=b"ciphertext_blob", - verified_plaintext_crc32c=True, - verified_additional_authenticated_data_crc32c=True, + state=resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION, protection_level=resources.ProtectionLevel.SOFTWARE, + algorithm=resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION, + import_job="import_job_value", + import_failure_reason="import_failure_reason_value", + generation_failure_reason="generation_failure_reason_value", + external_destruction_failure_reason="external_destruction_failure_reason_value", + reimport_eligible=True, ) - response = client.encrypt(request) + response = client.update_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.EncryptRequest() + request = service.UpdateCryptoKeyVersionRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, service.EncryptResponse) + assert isinstance(response, resources.CryptoKeyVersion) assert response.name == "name_value" - assert response.ciphertext == b"ciphertext_blob" - assert response.verified_plaintext_crc32c is True - assert response.verified_additional_authenticated_data_crc32c is True + assert ( + response.state + == resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION + ) assert response.protection_level == resources.ProtectionLevel.SOFTWARE + assert ( + response.algorithm + == resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION + ) + assert response.import_job == "import_job_value" + assert response.import_failure_reason == "import_failure_reason_value" + assert response.generation_failure_reason == "generation_failure_reason_value" + assert ( + response.external_destruction_failure_reason + == "external_destruction_failure_reason_value" + ) + assert response.reimport_eligible is True -def test_encrypt_non_empty_request_with_auto_populated_field(): +def test_update_crypto_key_version_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = KeyManagementServiceClient( @@ -8997,24 +9069,22 @@ def test_encrypt_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.EncryptRequest( - name="name_value", - ) + request = service.UpdateCryptoKeyVersionRequest() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.encrypt), "__call__") as call: + with mock.patch.object( + type(client.transport.update_crypto_key_version), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.encrypt(request=request) + client.update_crypto_key_version(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.EncryptRequest( - name="name_value", - ) + assert args[0] == service.UpdateCryptoKeyVersionRequest() -def test_encrypt_use_cached_wrapped_rpc(): +def test_update_crypto_key_version_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -9028,21 +9098,26 @@ def test_encrypt_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.encrypt in client._transport._wrapped_methods + assert ( + client._transport.update_crypto_key_version + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.encrypt] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_crypto_key_version + ] = mock_rpc request = {} - client.encrypt(request) + client.update_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.encrypt(request) + client.update_crypto_key_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9050,7 +9125,9 @@ def test_encrypt_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_encrypt_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_update_crypto_key_version_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -9065,7 +9142,7 @@ async def test_encrypt_async_use_cached_wrapped_rpc(transport: str = "grpc_async # Ensure method has been cached assert ( - client._client._transport.encrypt + client._client._transport.update_crypto_key_version in client._client._transport._wrapped_methods ) @@ -9073,16 +9150,16 @@ async def test_encrypt_async_use_cached_wrapped_rpc(transport: str = "grpc_async mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.encrypt + client._client._transport.update_crypto_key_version ] = mock_rpc request = {} - await client.encrypt(request) + await client.update_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.encrypt(request) + await client.update_crypto_key_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9090,8 +9167,8 @@ async def test_encrypt_async_use_cached_wrapped_rpc(transport: str = "grpc_async @pytest.mark.asyncio -async def test_encrypt_async( - transport: str = "grpc_asyncio", request_type=service.EncryptRequest +async def test_update_crypto_key_version_async( + transport: str = "grpc_asyncio", request_type=service.UpdateCryptoKeyVersionRequest ): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -9103,54 +9180,75 @@ async def test_encrypt_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.encrypt), "__call__") as call: + with mock.patch.object( + type(client.transport.update_crypto_key_version), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.EncryptResponse( + resources.CryptoKeyVersion( name="name_value", - ciphertext=b"ciphertext_blob", - verified_plaintext_crc32c=True, - verified_additional_authenticated_data_crc32c=True, + state=resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION, protection_level=resources.ProtectionLevel.SOFTWARE, + algorithm=resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION, + import_job="import_job_value", + import_failure_reason="import_failure_reason_value", + generation_failure_reason="generation_failure_reason_value", + external_destruction_failure_reason="external_destruction_failure_reason_value", + reimport_eligible=True, ) ) - response = await client.encrypt(request) + response = await client.update_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.EncryptRequest() + request = service.UpdateCryptoKeyVersionRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, service.EncryptResponse) + assert isinstance(response, resources.CryptoKeyVersion) assert response.name == "name_value" - assert response.ciphertext == b"ciphertext_blob" - assert response.verified_plaintext_crc32c is True - assert response.verified_additional_authenticated_data_crc32c is True - assert response.protection_level == resources.ProtectionLevel.SOFTWARE - - + assert ( + response.state + == resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION + ) + assert response.protection_level == resources.ProtectionLevel.SOFTWARE + assert ( + response.algorithm + == resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION + ) + assert response.import_job == "import_job_value" + assert response.import_failure_reason == "import_failure_reason_value" + assert response.generation_failure_reason == "generation_failure_reason_value" + assert ( + response.external_destruction_failure_reason + == "external_destruction_failure_reason_value" + ) + assert response.reimport_eligible is True + + @pytest.mark.asyncio -async def test_encrypt_async_from_dict(): - await test_encrypt_async(request_type=dict) +async def test_update_crypto_key_version_async_from_dict(): + await test_update_crypto_key_version_async(request_type=dict) -def test_encrypt_field_headers(): +def test_update_crypto_key_version_field_headers(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = service.EncryptRequest() + request = service.UpdateCryptoKeyVersionRequest() - request.name = "name_value" + request.crypto_key_version.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.encrypt), "__call__") as call: - call.return_value = service.EncryptResponse() - client.encrypt(request) + with mock.patch.object( + type(client.transport.update_crypto_key_version), "__call__" + ) as call: + call.return_value = resources.CryptoKeyVersion() + client.update_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -9161,28 +9259,30 @@ def test_encrypt_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "crypto_key_version.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_encrypt_field_headers_async(): +async def test_update_crypto_key_version_field_headers_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.EncryptRequest() + request = service.UpdateCryptoKeyVersionRequest() - request.name = "name_value" + request.crypto_key_version.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.encrypt), "__call__") as call: + with mock.patch.object( + type(client.transport.update_crypto_key_version), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.EncryptResponse() + resources.CryptoKeyVersion() ) - await client.encrypt(request) + await client.update_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -9193,39 +9293,41 @@ async def test_encrypt_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "crypto_key_version.name=name_value", ) in kw["metadata"] -def test_encrypt_flattened(): +def test_update_crypto_key_version_flattened(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.encrypt), "__call__") as call: + with mock.patch.object( + type(client.transport.update_crypto_key_version), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = service.EncryptResponse() + call.return_value = resources.CryptoKeyVersion() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.encrypt( - name="name_value", - plaintext=b"plaintext_blob", + client.update_crypto_key_version( + crypto_key_version=resources.CryptoKeyVersion(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].crypto_key_version + mock_val = resources.CryptoKeyVersion(name="name_value") assert arg == mock_val - arg = args[0].plaintext - mock_val = b"plaintext_blob" + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_encrypt_flattened_error(): +def test_update_crypto_key_version_flattened_error(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -9233,48 +9335,50 @@ def test_encrypt_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.encrypt( - service.EncryptRequest(), - name="name_value", - plaintext=b"plaintext_blob", + client.update_crypto_key_version( + service.UpdateCryptoKeyVersionRequest(), + crypto_key_version=resources.CryptoKeyVersion(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_encrypt_flattened_async(): +async def test_update_crypto_key_version_flattened_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.encrypt), "__call__") as call: + with mock.patch.object( + type(client.transport.update_crypto_key_version), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = service.EncryptResponse() + call.return_value = resources.CryptoKeyVersion() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.EncryptResponse() + resources.CryptoKeyVersion() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.encrypt( - name="name_value", - plaintext=b"plaintext_blob", + response = await client.update_crypto_key_version( + crypto_key_version=resources.CryptoKeyVersion(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].crypto_key_version + mock_val = resources.CryptoKeyVersion(name="name_value") assert arg == mock_val - arg = args[0].plaintext - mock_val = b"plaintext_blob" + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_encrypt_flattened_error_async(): +async def test_update_crypto_key_version_flattened_error_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) @@ -9282,21 +9386,21 @@ async def test_encrypt_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.encrypt( - service.EncryptRequest(), - name="name_value", - plaintext=b"plaintext_blob", + await client.update_crypto_key_version( + service.UpdateCryptoKeyVersionRequest(), + crypto_key_version=resources.CryptoKeyVersion(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - service.DecryptRequest, + service.UpdateCryptoKeyPrimaryVersionRequest, dict, ], ) -def test_decrypt(request_type, transport: str = "grpc"): +def test_update_crypto_key_primary_version(request_type, transport: str = "grpc"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9307,29 +9411,33 @@ def test_decrypt(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.decrypt), "__call__") as call: + with mock.patch.object( + type(client.transport.update_crypto_key_primary_version), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = service.DecryptResponse( - plaintext=b"plaintext_blob", - used_primary=True, - protection_level=resources.ProtectionLevel.SOFTWARE, + call.return_value = resources.CryptoKey( + name="name_value", + purpose=resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT, + import_only=True, + crypto_key_backend="crypto_key_backend_value", ) - response = client.decrypt(request) + response = client.update_crypto_key_primary_version(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.DecryptRequest() + request = service.UpdateCryptoKeyPrimaryVersionRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, service.DecryptResponse) - assert response.plaintext == b"plaintext_blob" - assert response.used_primary is True - assert response.protection_level == resources.ProtectionLevel.SOFTWARE + assert isinstance(response, resources.CryptoKey) + assert response.name == "name_value" + assert response.purpose == resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT + assert response.import_only is True + assert response.crypto_key_backend == "crypto_key_backend_value" -def test_decrypt_non_empty_request_with_auto_populated_field(): +def test_update_crypto_key_primary_version_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = KeyManagementServiceClient( @@ -9340,24 +9448,28 @@ def test_decrypt_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = service.DecryptRequest( + request = service.UpdateCryptoKeyPrimaryVersionRequest( name="name_value", + crypto_key_version_id="crypto_key_version_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.decrypt), "__call__") as call: + with mock.patch.object( + type(client.transport.update_crypto_key_primary_version), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.decrypt(request=request) + client.update_crypto_key_primary_version(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.DecryptRequest( + assert args[0] == service.UpdateCryptoKeyPrimaryVersionRequest( name="name_value", + crypto_key_version_id="crypto_key_version_id_value", ) -def test_decrypt_use_cached_wrapped_rpc(): +def test_update_crypto_key_primary_version_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -9371,21 +9483,26 @@ def test_decrypt_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.decrypt in client._transport._wrapped_methods + assert ( + client._transport.update_crypto_key_primary_version + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.decrypt] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_crypto_key_primary_version + ] = mock_rpc request = {} - client.decrypt(request) + client.update_crypto_key_primary_version(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.decrypt(request) + client.update_crypto_key_primary_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9393,7 +9510,9 @@ def test_decrypt_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_decrypt_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_update_crypto_key_primary_version_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -9408,7 +9527,7 @@ async def test_decrypt_async_use_cached_wrapped_rpc(transport: str = "grpc_async # Ensure method has been cached assert ( - client._client._transport.decrypt + client._client._transport.update_crypto_key_primary_version in client._client._transport._wrapped_methods ) @@ -9416,16 +9535,16 @@ async def test_decrypt_async_use_cached_wrapped_rpc(transport: str = "grpc_async mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.decrypt + client._client._transport.update_crypto_key_primary_version ] = mock_rpc request = {} - await client.decrypt(request) + await client.update_crypto_key_primary_version(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.decrypt(request) + await client.update_crypto_key_primary_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9433,8 +9552,9 @@ async def test_decrypt_async_use_cached_wrapped_rpc(transport: str = "grpc_async @pytest.mark.asyncio -async def test_decrypt_async( - transport: str = "grpc_asyncio", request_type=service.DecryptRequest +async def test_update_crypto_key_primary_version_async( + transport: str = "grpc_asyncio", + request_type=service.UpdateCryptoKeyPrimaryVersionRequest, ): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -9446,50 +9566,56 @@ async def test_decrypt_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.decrypt), "__call__") as call: + with mock.patch.object( + type(client.transport.update_crypto_key_primary_version), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.DecryptResponse( - plaintext=b"plaintext_blob", - used_primary=True, - protection_level=resources.ProtectionLevel.SOFTWARE, + resources.CryptoKey( + name="name_value", + purpose=resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT, + import_only=True, + crypto_key_backend="crypto_key_backend_value", ) ) - response = await client.decrypt(request) + response = await client.update_crypto_key_primary_version(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.DecryptRequest() + request = service.UpdateCryptoKeyPrimaryVersionRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, service.DecryptResponse) - assert response.plaintext == b"plaintext_blob" - assert response.used_primary is True - assert response.protection_level == resources.ProtectionLevel.SOFTWARE + assert isinstance(response, resources.CryptoKey) + assert response.name == "name_value" + assert response.purpose == resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT + assert response.import_only is True + assert response.crypto_key_backend == "crypto_key_backend_value" @pytest.mark.asyncio -async def test_decrypt_async_from_dict(): - await test_decrypt_async(request_type=dict) +async def test_update_crypto_key_primary_version_async_from_dict(): + await test_update_crypto_key_primary_version_async(request_type=dict) -def test_decrypt_field_headers(): +def test_update_crypto_key_primary_version_field_headers(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.DecryptRequest() + request = service.UpdateCryptoKeyPrimaryVersionRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.decrypt), "__call__") as call: - call.return_value = service.DecryptResponse() - client.decrypt(request) + with mock.patch.object( + type(client.transport.update_crypto_key_primary_version), "__call__" + ) as call: + call.return_value = resources.CryptoKey() + client.update_crypto_key_primary_version(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -9505,23 +9631,23 @@ def test_decrypt_field_headers(): @pytest.mark.asyncio -async def test_decrypt_field_headers_async(): +async def test_update_crypto_key_primary_version_field_headers_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.DecryptRequest() + request = service.UpdateCryptoKeyPrimaryVersionRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.decrypt), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.DecryptResponse() - ) - await client.decrypt(request) + with mock.patch.object( + type(client.transport.update_crypto_key_primary_version), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.CryptoKey()) + await client.update_crypto_key_primary_version(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -9536,20 +9662,22 @@ async def test_decrypt_field_headers_async(): ) in kw["metadata"] -def test_decrypt_flattened(): +def test_update_crypto_key_primary_version_flattened(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.decrypt), "__call__") as call: + with mock.patch.object( + type(client.transport.update_crypto_key_primary_version), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = service.DecryptResponse() + call.return_value = resources.CryptoKey() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.decrypt( + client.update_crypto_key_primary_version( name="name_value", - ciphertext=b"ciphertext_blob", + crypto_key_version_id="crypto_key_version_id_value", ) # Establish that the underlying call was made with the expected @@ -9559,12 +9687,12 @@ def test_decrypt_flattened(): arg = args[0].name mock_val = "name_value" assert arg == mock_val - arg = args[0].ciphertext - mock_val = b"ciphertext_blob" + arg = args[0].crypto_key_version_id + mock_val = "crypto_key_version_id_value" assert arg == mock_val -def test_decrypt_flattened_error(): +def test_update_crypto_key_primary_version_flattened_error(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -9572,32 +9700,32 @@ def test_decrypt_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.decrypt( - service.DecryptRequest(), + client.update_crypto_key_primary_version( + service.UpdateCryptoKeyPrimaryVersionRequest(), name="name_value", - ciphertext=b"ciphertext_blob", + crypto_key_version_id="crypto_key_version_id_value", ) @pytest.mark.asyncio -async def test_decrypt_flattened_async(): +async def test_update_crypto_key_primary_version_flattened_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.decrypt), "__call__") as call: + with mock.patch.object( + type(client.transport.update_crypto_key_primary_version), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = service.DecryptResponse() + call.return_value = resources.CryptoKey() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.DecryptResponse() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.CryptoKey()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.decrypt( + response = await client.update_crypto_key_primary_version( name="name_value", - ciphertext=b"ciphertext_blob", + crypto_key_version_id="crypto_key_version_id_value", ) # Establish that the underlying call was made with the expected @@ -9607,13 +9735,13 @@ async def test_decrypt_flattened_async(): arg = args[0].name mock_val = "name_value" assert arg == mock_val - arg = args[0].ciphertext - mock_val = b"ciphertext_blob" + arg = args[0].crypto_key_version_id + mock_val = "crypto_key_version_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_decrypt_flattened_error_async(): +async def test_update_crypto_key_primary_version_flattened_error_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) @@ -9621,21 +9749,21 @@ async def test_decrypt_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.decrypt( - service.DecryptRequest(), + await client.update_crypto_key_primary_version( + service.UpdateCryptoKeyPrimaryVersionRequest(), name="name_value", - ciphertext=b"ciphertext_blob", + crypto_key_version_id="crypto_key_version_id_value", ) @pytest.mark.parametrize( "request_type", [ - service.RawEncryptRequest, + service.DestroyCryptoKeyVersionRequest, dict, ], ) -def test_raw_encrypt(request_type, transport: str = "grpc"): +def test_destroy_crypto_key_version(request_type, transport: str = "grpc"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9646,39 +9774,52 @@ def test_raw_encrypt(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.raw_encrypt), "__call__") as call: + with mock.patch.object( + type(client.transport.destroy_crypto_key_version), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = service.RawEncryptResponse( - ciphertext=b"ciphertext_blob", - initialization_vector=b"initialization_vector_blob", - tag_length=1053, - verified_plaintext_crc32c=True, - verified_additional_authenticated_data_crc32c=True, - verified_initialization_vector_crc32c=True, + call.return_value = resources.CryptoKeyVersion( name="name_value", + state=resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION, protection_level=resources.ProtectionLevel.SOFTWARE, + algorithm=resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION, + import_job="import_job_value", + import_failure_reason="import_failure_reason_value", + generation_failure_reason="generation_failure_reason_value", + external_destruction_failure_reason="external_destruction_failure_reason_value", + reimport_eligible=True, ) - response = client.raw_encrypt(request) + response = client.destroy_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.RawEncryptRequest() + request = service.DestroyCryptoKeyVersionRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, service.RawEncryptResponse) - assert response.ciphertext == b"ciphertext_blob" - assert response.initialization_vector == b"initialization_vector_blob" - assert response.tag_length == 1053 - assert response.verified_plaintext_crc32c is True - assert response.verified_additional_authenticated_data_crc32c is True - assert response.verified_initialization_vector_crc32c is True + assert isinstance(response, resources.CryptoKeyVersion) assert response.name == "name_value" + assert ( + response.state + == resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION + ) assert response.protection_level == resources.ProtectionLevel.SOFTWARE + assert ( + response.algorithm + == resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION + ) + assert response.import_job == "import_job_value" + assert response.import_failure_reason == "import_failure_reason_value" + assert response.generation_failure_reason == "generation_failure_reason_value" + assert ( + response.external_destruction_failure_reason + == "external_destruction_failure_reason_value" + ) + assert response.reimport_eligible is True -def test_raw_encrypt_non_empty_request_with_auto_populated_field(): +def test_destroy_crypto_key_version_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = KeyManagementServiceClient( @@ -9689,24 +9830,26 @@ def test_raw_encrypt_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.RawEncryptRequest( + request = service.DestroyCryptoKeyVersionRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.raw_encrypt), "__call__") as call: + with mock.patch.object( + type(client.transport.destroy_crypto_key_version), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.raw_encrypt(request=request) + client.destroy_crypto_key_version(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.RawEncryptRequest( + assert args[0] == service.DestroyCryptoKeyVersionRequest( name="name_value", ) -def test_raw_encrypt_use_cached_wrapped_rpc(): +def test_destroy_crypto_key_version_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -9720,21 +9863,26 @@ def test_raw_encrypt_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.raw_encrypt in client._transport._wrapped_methods + assert ( + client._transport.destroy_crypto_key_version + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.raw_encrypt] = mock_rpc + client._transport._wrapped_methods[ + client._transport.destroy_crypto_key_version + ] = mock_rpc request = {} - client.raw_encrypt(request) + client.destroy_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.raw_encrypt(request) + client.destroy_crypto_key_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9742,7 +9890,7 @@ def test_raw_encrypt_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_raw_encrypt_async_use_cached_wrapped_rpc( +async def test_destroy_crypto_key_version_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -9759,7 +9907,7 @@ async def test_raw_encrypt_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.raw_encrypt + client._client._transport.destroy_crypto_key_version in client._client._transport._wrapped_methods ) @@ -9767,16 +9915,16 @@ async def test_raw_encrypt_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.raw_encrypt + client._client._transport.destroy_crypto_key_version ] = mock_rpc request = {} - await client.raw_encrypt(request) + await client.destroy_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.raw_encrypt(request) + await client.destroy_crypto_key_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9784,8 +9932,8 @@ async def test_raw_encrypt_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_raw_encrypt_async( - transport: str = "grpc_asyncio", request_type=service.RawEncryptRequest +async def test_destroy_crypto_key_version_async( + transport: str = "grpc_asyncio", request_type=service.DestroyCryptoKeyVersionRequest ): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -9797,60 +9945,75 @@ async def test_raw_encrypt_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.raw_encrypt), "__call__") as call: + with mock.patch.object( + type(client.transport.destroy_crypto_key_version), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.RawEncryptResponse( - ciphertext=b"ciphertext_blob", - initialization_vector=b"initialization_vector_blob", - tag_length=1053, - verified_plaintext_crc32c=True, - verified_additional_authenticated_data_crc32c=True, - verified_initialization_vector_crc32c=True, + resources.CryptoKeyVersion( name="name_value", + state=resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION, protection_level=resources.ProtectionLevel.SOFTWARE, + algorithm=resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION, + import_job="import_job_value", + import_failure_reason="import_failure_reason_value", + generation_failure_reason="generation_failure_reason_value", + external_destruction_failure_reason="external_destruction_failure_reason_value", + reimport_eligible=True, ) ) - response = await client.raw_encrypt(request) + response = await client.destroy_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.RawEncryptRequest() + request = service.DestroyCryptoKeyVersionRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, service.RawEncryptResponse) - assert response.ciphertext == b"ciphertext_blob" - assert response.initialization_vector == b"initialization_vector_blob" - assert response.tag_length == 1053 - assert response.verified_plaintext_crc32c is True - assert response.verified_additional_authenticated_data_crc32c is True - assert response.verified_initialization_vector_crc32c is True + assert isinstance(response, resources.CryptoKeyVersion) assert response.name == "name_value" + assert ( + response.state + == resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION + ) assert response.protection_level == resources.ProtectionLevel.SOFTWARE + assert ( + response.algorithm + == resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION + ) + assert response.import_job == "import_job_value" + assert response.import_failure_reason == "import_failure_reason_value" + assert response.generation_failure_reason == "generation_failure_reason_value" + assert ( + response.external_destruction_failure_reason + == "external_destruction_failure_reason_value" + ) + assert response.reimport_eligible is True @pytest.mark.asyncio -async def test_raw_encrypt_async_from_dict(): - await test_raw_encrypt_async(request_type=dict) +async def test_destroy_crypto_key_version_async_from_dict(): + await test_destroy_crypto_key_version_async(request_type=dict) -def test_raw_encrypt_field_headers(): +def test_destroy_crypto_key_version_field_headers(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.RawEncryptRequest() + request = service.DestroyCryptoKeyVersionRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.raw_encrypt), "__call__") as call: - call.return_value = service.RawEncryptResponse() - client.raw_encrypt(request) + with mock.patch.object( + type(client.transport.destroy_crypto_key_version), "__call__" + ) as call: + call.return_value = resources.CryptoKeyVersion() + client.destroy_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -9866,23 +10029,25 @@ def test_raw_encrypt_field_headers(): @pytest.mark.asyncio -async def test_raw_encrypt_field_headers_async(): +async def test_destroy_crypto_key_version_field_headers_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.RawEncryptRequest() + request = service.DestroyCryptoKeyVersionRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.raw_encrypt), "__call__") as call: + with mock.patch.object( + type(client.transport.destroy_crypto_key_version), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.RawEncryptResponse() + resources.CryptoKeyVersion() ) - await client.raw_encrypt(request) + await client.destroy_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -9897,51 +10062,156 @@ async def test_raw_encrypt_field_headers_async(): ) in kw["metadata"] -@pytest.mark.parametrize( - "request_type", - [ - service.RawDecryptRequest, - dict, - ], -) -def test_raw_decrypt(request_type, transport: str = "grpc"): +def test_destroy_crypto_key_version_flattened(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.raw_decrypt), "__call__") as call: + with mock.patch.object( + type(client.transport.destroy_crypto_key_version), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = service.RawDecryptResponse( - plaintext=b"plaintext_blob", - protection_level=resources.ProtectionLevel.SOFTWARE, - verified_ciphertext_crc32c=True, - verified_additional_authenticated_data_crc32c=True, - verified_initialization_vector_crc32c=True, + call.return_value = resources.CryptoKeyVersion() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.destroy_crypto_key_version( + name="name_value", ) - response = client.raw_decrypt(request) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying call was made with the expected + # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.RawDecryptRequest() - assert args[0] == request + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val - # Establish that the response is the type that we expect. 
- assert isinstance(response, service.RawDecryptResponse) - assert response.plaintext == b"plaintext_blob" + +def test_destroy_crypto_key_version_flattened_error(): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.destroy_crypto_key_version( + service.DestroyCryptoKeyVersionRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_destroy_crypto_key_version_flattened_async(): + client = KeyManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.destroy_crypto_key_version), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.CryptoKeyVersion() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.CryptoKeyVersion() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.destroy_crypto_key_version( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_destroy_crypto_key_version_flattened_error_async(): + client = KeyManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.destroy_crypto_key_version( + service.DestroyCryptoKeyVersionRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.RestoreCryptoKeyVersionRequest, + dict, + ], +) +def test_restore_crypto_key_version(request_type, transport: str = "grpc"): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_crypto_key_version), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.CryptoKeyVersion( + name="name_value", + state=resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION, + protection_level=resources.ProtectionLevel.SOFTWARE, + algorithm=resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION, + import_job="import_job_value", + import_failure_reason="import_failure_reason_value", + generation_failure_reason="generation_failure_reason_value", + external_destruction_failure_reason="external_destruction_failure_reason_value", + reimport_eligible=True, + ) + response = client.restore_crypto_key_version(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.RestoreCryptoKeyVersionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.CryptoKeyVersion) + assert response.name == "name_value" + assert ( + response.state + == resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION + ) assert response.protection_level == resources.ProtectionLevel.SOFTWARE - assert response.verified_ciphertext_crc32c is True - assert response.verified_additional_authenticated_data_crc32c is True - assert response.verified_initialization_vector_crc32c is True + assert ( + response.algorithm + == resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION + ) + assert response.import_job == "import_job_value" + assert response.import_failure_reason == "import_failure_reason_value" + assert response.generation_failure_reason == "generation_failure_reason_value" + assert ( + response.external_destruction_failure_reason + == "external_destruction_failure_reason_value" + ) + assert response.reimport_eligible is True -def test_raw_decrypt_non_empty_request_with_auto_populated_field(): +def test_restore_crypto_key_version_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = KeyManagementServiceClient( @@ -9952,24 +10222,26 @@ def test_raw_decrypt_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.RawDecryptRequest( + request = service.RestoreCryptoKeyVersionRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.raw_decrypt), "__call__") as call: + with mock.patch.object( + type(client.transport.restore_crypto_key_version), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.raw_decrypt(request=request) + client.restore_crypto_key_version(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.RawDecryptRequest( + assert args[0] == service.RestoreCryptoKeyVersionRequest( name="name_value", ) -def test_raw_decrypt_use_cached_wrapped_rpc(): +def test_restore_crypto_key_version_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -9983,21 +10255,26 @@ def test_raw_decrypt_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.raw_decrypt in client._transport._wrapped_methods + assert ( + client._transport.restore_crypto_key_version + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.raw_decrypt] = mock_rpc + client._transport._wrapped_methods[ + client._transport.restore_crypto_key_version + ] = mock_rpc request = {} - client.raw_decrypt(request) + client.restore_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.raw_decrypt(request) + client.restore_crypto_key_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10005,7 +10282,7 @@ def test_raw_decrypt_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_raw_decrypt_async_use_cached_wrapped_rpc( +async def test_restore_crypto_key_version_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10022,7 +10299,7 @@ async def test_raw_decrypt_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.raw_decrypt + client._client._transport.restore_crypto_key_version in client._client._transport._wrapped_methods ) @@ -10030,16 +10307,16 @@ async def test_raw_decrypt_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.raw_decrypt + client._client._transport.restore_crypto_key_version ] = mock_rpc request = {} - await client.raw_decrypt(request) + await client.restore_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.raw_decrypt(request) + await client.restore_crypto_key_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10047,8 +10324,8 @@ async def test_raw_decrypt_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_raw_decrypt_async( - transport: str = "grpc_asyncio", request_type=service.RawDecryptRequest +async def test_restore_crypto_key_version_async( + transport: str = "grpc_asyncio", request_type=service.RestoreCryptoKeyVersionRequest ): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -10060,54 +10337,75 @@ async def test_raw_decrypt_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.raw_decrypt), "__call__") as call: + with mock.patch.object( + type(client.transport.restore_crypto_key_version), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.RawDecryptResponse( - plaintext=b"plaintext_blob", + resources.CryptoKeyVersion( + name="name_value", + state=resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION, protection_level=resources.ProtectionLevel.SOFTWARE, - verified_ciphertext_crc32c=True, - verified_additional_authenticated_data_crc32c=True, - verified_initialization_vector_crc32c=True, + algorithm=resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION, + import_job="import_job_value", + import_failure_reason="import_failure_reason_value", + generation_failure_reason="generation_failure_reason_value", + external_destruction_failure_reason="external_destruction_failure_reason_value", + reimport_eligible=True, ) ) - response = await client.raw_decrypt(request) + response = await client.restore_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.RawDecryptRequest() + request = service.RestoreCryptoKeyVersionRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, service.RawDecryptResponse) - assert response.plaintext == b"plaintext_blob" + assert isinstance(response, resources.CryptoKeyVersion) + assert response.name == "name_value" + assert ( + response.state + == resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION + ) assert response.protection_level == resources.ProtectionLevel.SOFTWARE - assert response.verified_ciphertext_crc32c is True - assert response.verified_additional_authenticated_data_crc32c is True - assert response.verified_initialization_vector_crc32c is True + assert ( + response.algorithm + == resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION + ) + assert response.import_job == "import_job_value" + assert response.import_failure_reason == "import_failure_reason_value" + assert response.generation_failure_reason == "generation_failure_reason_value" + assert ( + response.external_destruction_failure_reason + == "external_destruction_failure_reason_value" + ) + assert response.reimport_eligible is True @pytest.mark.asyncio -async def test_raw_decrypt_async_from_dict(): - await test_raw_decrypt_async(request_type=dict) +async def test_restore_crypto_key_version_async_from_dict(): + await test_restore_crypto_key_version_async(request_type=dict) -def test_raw_decrypt_field_headers(): +def test_restore_crypto_key_version_field_headers(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.RawDecryptRequest() + request = service.RestoreCryptoKeyVersionRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.raw_decrypt), "__call__") as call: - call.return_value = service.RawDecryptResponse() - client.raw_decrypt(request) + with mock.patch.object( + type(client.transport.restore_crypto_key_version), "__call__" + ) as call: + call.return_value = resources.CryptoKeyVersion() + client.restore_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -10123,23 +10421,25 @@ def test_raw_decrypt_field_headers(): @pytest.mark.asyncio -async def test_raw_decrypt_field_headers_async(): +async def test_restore_crypto_key_version_field_headers_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.RawDecryptRequest() + request = service.RestoreCryptoKeyVersionRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.raw_decrypt), "__call__") as call: + with mock.patch.object( + type(client.transport.restore_crypto_key_version), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.RawDecryptResponse() + resources.CryptoKeyVersion() ) - await client.raw_decrypt(request) + await client.restore_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -10154,51 +10454,137 @@ async def test_raw_decrypt_field_headers_async(): ) in kw["metadata"] -@pytest.mark.parametrize( - "request_type", - [ - service.AsymmetricSignRequest, - dict, - ], -) -def test_asymmetric_sign(request_type, transport: str = "grpc"): +def test_restore_crypto_key_version_flattened(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.asymmetric_sign), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = service.AsymmetricSignResponse( - signature=b"signature_blob", - verified_digest_crc32c=True, + with mock.patch.object( + type(client.transport.restore_crypto_key_version), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.CryptoKeyVersion() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.restore_crypto_key_version( name="name_value", - verified_data_crc32c=True, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_restore_crypto_key_version_flattened_error(): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.restore_crypto_key_version( + service.RestoreCryptoKeyVersionRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_restore_crypto_key_version_flattened_async(): + client = KeyManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_crypto_key_version), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.CryptoKeyVersion() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.CryptoKeyVersion() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.restore_crypto_key_version( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_restore_crypto_key_version_flattened_error_async(): + client = KeyManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.restore_crypto_key_version( + service.RestoreCryptoKeyVersionRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.EncryptRequest, + dict, + ], +) +def test_encrypt(request_type, transport: str = "grpc"): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.encrypt), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.EncryptResponse( + name="name_value", + ciphertext=b"ciphertext_blob", + verified_plaintext_crc32c=True, + verified_additional_authenticated_data_crc32c=True, protection_level=resources.ProtectionLevel.SOFTWARE, ) - response = client.asymmetric_sign(request) + response = client.encrypt(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.AsymmetricSignRequest() + request = service.EncryptRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, service.AsymmetricSignResponse) - assert response.signature == b"signature_blob" - assert response.verified_digest_crc32c is True + assert isinstance(response, service.EncryptResponse) assert response.name == "name_value" - assert response.verified_data_crc32c is True + assert response.ciphertext == b"ciphertext_blob" + assert response.verified_plaintext_crc32c is True + assert response.verified_additional_authenticated_data_crc32c is True assert response.protection_level == resources.ProtectionLevel.SOFTWARE -def test_asymmetric_sign_non_empty_request_with_auto_populated_field(): +def test_encrypt_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = KeyManagementServiceClient( @@ -10209,24 +10595,24 @@ def test_asymmetric_sign_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = service.AsymmetricSignRequest( + request = service.EncryptRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.asymmetric_sign), "__call__") as call: + with mock.patch.object(type(client.transport.encrypt), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.asymmetric_sign(request=request) + client.encrypt(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.AsymmetricSignRequest( + assert args[0] == service.EncryptRequest( name="name_value", ) -def test_asymmetric_sign_use_cached_wrapped_rpc(): +def test_encrypt_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10240,21 +10626,21 @@ def test_asymmetric_sign_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.asymmetric_sign in client._transport._wrapped_methods + assert client._transport.encrypt in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.asymmetric_sign] = mock_rpc + client._transport._wrapped_methods[client._transport.encrypt] = mock_rpc request = {} - client.asymmetric_sign(request) + client.encrypt(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.asymmetric_sign(request) + client.encrypt(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10262,9 +10648,7 @@ def test_asymmetric_sign_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_asymmetric_sign_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): +async def test_encrypt_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -10279,7 +10663,7 @@ async def test_asymmetric_sign_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.asymmetric_sign + client._client._transport.encrypt in client._client._transport._wrapped_methods ) @@ -10287,16 +10671,16 @@ async def test_asymmetric_sign_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.asymmetric_sign + client._client._transport.encrypt ] = mock_rpc request = {} - await client.asymmetric_sign(request) + await client.encrypt(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.asymmetric_sign(request) + await client.encrypt(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10304,8 +10688,8 @@ async def test_asymmetric_sign_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_asymmetric_sign_async( - transport: str = "grpc_asyncio", request_type=service.AsymmetricSignRequest +async def test_encrypt_async( + transport: str = "grpc_asyncio", request_type=service.EncryptRequest ): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -10317,54 +10701,54 @@ async def test_asymmetric_sign_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.asymmetric_sign), "__call__") as call: + with mock.patch.object(type(client.transport.encrypt), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.AsymmetricSignResponse( - signature=b"signature_blob", - verified_digest_crc32c=True, + service.EncryptResponse( name="name_value", - verified_data_crc32c=True, + ciphertext=b"ciphertext_blob", + verified_plaintext_crc32c=True, + verified_additional_authenticated_data_crc32c=True, protection_level=resources.ProtectionLevel.SOFTWARE, ) ) - response = await client.asymmetric_sign(request) + response = await client.encrypt(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.AsymmetricSignRequest() + request = service.EncryptRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, service.AsymmetricSignResponse) - assert response.signature == b"signature_blob" - assert response.verified_digest_crc32c is True + assert isinstance(response, service.EncryptResponse) assert response.name == "name_value" - assert response.verified_data_crc32c is True + assert response.ciphertext == b"ciphertext_blob" + assert response.verified_plaintext_crc32c is True + assert response.verified_additional_authenticated_data_crc32c is True assert response.protection_level == resources.ProtectionLevel.SOFTWARE @pytest.mark.asyncio -async def test_asymmetric_sign_async_from_dict(): - await test_asymmetric_sign_async(request_type=dict) +async def test_encrypt_async_from_dict(): + await test_encrypt_async(request_type=dict) -def test_asymmetric_sign_field_headers(): +def test_encrypt_field_headers(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.AsymmetricSignRequest() + request = service.EncryptRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.asymmetric_sign), "__call__") as call: - call.return_value = service.AsymmetricSignResponse() - client.asymmetric_sign(request) + with mock.patch.object(type(client.transport.encrypt), "__call__") as call: + call.return_value = service.EncryptResponse() + client.encrypt(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -10380,23 +10764,23 @@ def test_asymmetric_sign_field_headers(): @pytest.mark.asyncio -async def test_asymmetric_sign_field_headers_async(): +async def test_encrypt_field_headers_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. - request = service.AsymmetricSignRequest() + request = service.EncryptRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.asymmetric_sign), "__call__") as call: + with mock.patch.object(type(client.transport.encrypt), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.AsymmetricSignResponse() + service.EncryptResponse() ) - await client.asymmetric_sign(request) + await client.encrypt(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -10411,20 +10795,20 @@ async def test_asymmetric_sign_field_headers_async(): ) in kw["metadata"] -def test_asymmetric_sign_flattened(): +def test_encrypt_flattened(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.asymmetric_sign), "__call__") as call: + with mock.patch.object(type(client.transport.encrypt), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = service.AsymmetricSignResponse() + call.return_value = service.EncryptResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.asymmetric_sign( + client.encrypt( name="name_value", - digest=service.Digest(sha256=b"sha256_blob"), + plaintext=b"plaintext_blob", ) # Establish that the underlying call was made with the expected @@ -10434,12 +10818,12 @@ def test_asymmetric_sign_flattened(): arg = args[0].name mock_val = "name_value" assert arg == mock_val - arg = args[0].digest - mock_val = service.Digest(sha256=b"sha256_blob") + arg = args[0].plaintext + mock_val = b"plaintext_blob" assert arg == mock_val -def test_asymmetric_sign_flattened_error(): +def test_encrypt_flattened_error(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10447,32 +10831,32 @@ def test_asymmetric_sign_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.asymmetric_sign( - service.AsymmetricSignRequest(), + client.encrypt( + service.EncryptRequest(), name="name_value", - digest=service.Digest(sha256=b"sha256_blob"), + plaintext=b"plaintext_blob", ) @pytest.mark.asyncio -async def test_asymmetric_sign_flattened_async(): +async def test_encrypt_flattened_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.asymmetric_sign), "__call__") as call: + with mock.patch.object(type(client.transport.encrypt), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = service.AsymmetricSignResponse() + call.return_value = service.EncryptResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.AsymmetricSignResponse() + service.EncryptResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.asymmetric_sign( + response = await client.encrypt( name="name_value", - digest=service.Digest(sha256=b"sha256_blob"), + plaintext=b"plaintext_blob", ) # Establish that the underlying call was made with the expected @@ -10482,13 +10866,13 @@ async def test_asymmetric_sign_flattened_async(): arg = args[0].name mock_val = "name_value" assert arg == mock_val - arg = args[0].digest - mock_val = service.Digest(sha256=b"sha256_blob") + arg = args[0].plaintext + mock_val = b"plaintext_blob" assert arg == mock_val @pytest.mark.asyncio -async def test_asymmetric_sign_flattened_error_async(): +async def test_encrypt_flattened_error_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) @@ -10496,21 +10880,21 @@ async def test_asymmetric_sign_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.asymmetric_sign( - service.AsymmetricSignRequest(), + await client.encrypt( + service.EncryptRequest(), name="name_value", - digest=service.Digest(sha256=b"sha256_blob"), + plaintext=b"plaintext_blob", ) @pytest.mark.parametrize( "request_type", [ - service.AsymmetricDecryptRequest, + service.DecryptRequest, dict, ], ) -def test_asymmetric_decrypt(request_type, transport: str = "grpc"): +def test_decrypt(request_type, transport: str = "grpc"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10521,31 +10905,29 @@ def test_asymmetric_decrypt(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.asymmetric_decrypt), "__call__" - ) as call: + with mock.patch.object(type(client.transport.decrypt), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = service.AsymmetricDecryptResponse( + call.return_value = service.DecryptResponse( plaintext=b"plaintext_blob", - verified_ciphertext_crc32c=True, + used_primary=True, protection_level=resources.ProtectionLevel.SOFTWARE, ) - response = client.asymmetric_decrypt(request) + response = client.decrypt(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.AsymmetricDecryptRequest() + request = service.DecryptRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, service.AsymmetricDecryptResponse) + assert isinstance(response, service.DecryptResponse) assert response.plaintext == b"plaintext_blob" - assert response.verified_ciphertext_crc32c is True + assert response.used_primary is True assert response.protection_level == resources.ProtectionLevel.SOFTWARE -def test_asymmetric_decrypt_non_empty_request_with_auto_populated_field(): +def test_decrypt_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = KeyManagementServiceClient( @@ -10556,26 +10938,24 @@ def test_asymmetric_decrypt_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.AsymmetricDecryptRequest( + request = service.DecryptRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.asymmetric_decrypt), "__call__" - ) as call: + with mock.patch.object(type(client.transport.decrypt), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.asymmetric_decrypt(request=request) + client.decrypt(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.AsymmetricDecryptRequest( + assert args[0] == service.DecryptRequest( name="name_value", ) -def test_asymmetric_decrypt_use_cached_wrapped_rpc(): +def test_decrypt_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10589,25 +10969,21 @@ def test_asymmetric_decrypt_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.asymmetric_decrypt in client._transport._wrapped_methods - ) + assert client._transport.decrypt in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.asymmetric_decrypt - ] = mock_rpc + client._transport._wrapped_methods[client._transport.decrypt] = mock_rpc request = {} - client.asymmetric_decrypt(request) + client.decrypt(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.asymmetric_decrypt(request) + client.decrypt(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10615,9 +10991,7 @@ def test_asymmetric_decrypt_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_asymmetric_decrypt_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): +async def test_decrypt_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -10632,7 +11006,7 @@ async def test_asymmetric_decrypt_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.asymmetric_decrypt + client._client._transport.decrypt in client._client._transport._wrapped_methods ) @@ -10640,16 +11014,16 @@ async def test_asymmetric_decrypt_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.asymmetric_decrypt + client._client._transport.decrypt ] = mock_rpc request = {} - await client.asymmetric_decrypt(request) + await client.decrypt(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.asymmetric_decrypt(request) + await client.decrypt(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10657,8 +11031,8 @@ async def test_asymmetric_decrypt_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_asymmetric_decrypt_async( - transport: str = "grpc_asyncio", request_type=service.AsymmetricDecryptRequest +async def test_decrypt_async( + transport: str = "grpc_asyncio", request_type=service.DecryptRequest ): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -10670,54 +11044,50 @@ async def test_asymmetric_decrypt_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.asymmetric_decrypt), "__call__" - ) as call: + with mock.patch.object(type(client.transport.decrypt), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.AsymmetricDecryptResponse( + service.DecryptResponse( plaintext=b"plaintext_blob", - verified_ciphertext_crc32c=True, + used_primary=True, protection_level=resources.ProtectionLevel.SOFTWARE, ) ) - response = await client.asymmetric_decrypt(request) + response = await client.decrypt(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.AsymmetricDecryptRequest() + request = service.DecryptRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, service.AsymmetricDecryptResponse) + assert isinstance(response, service.DecryptResponse) assert response.plaintext == b"plaintext_blob" - assert response.verified_ciphertext_crc32c is True + assert response.used_primary is True assert response.protection_level == resources.ProtectionLevel.SOFTWARE @pytest.mark.asyncio -async def test_asymmetric_decrypt_async_from_dict(): - await test_asymmetric_decrypt_async(request_type=dict) +async def test_decrypt_async_from_dict(): + await test_decrypt_async(request_type=dict) -def test_asymmetric_decrypt_field_headers(): +def test_decrypt_field_headers(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.AsymmetricDecryptRequest() + request = service.DecryptRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.asymmetric_decrypt), "__call__" - ) as call: - call.return_value = service.AsymmetricDecryptResponse() - client.asymmetric_decrypt(request) + with mock.patch.object(type(client.transport.decrypt), "__call__") as call: + call.return_value = service.DecryptResponse() + client.decrypt(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -10733,25 +11103,23 @@ def test_asymmetric_decrypt_field_headers(): @pytest.mark.asyncio -async def test_asymmetric_decrypt_field_headers_async(): +async def test_decrypt_field_headers_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = service.AsymmetricDecryptRequest() + request = service.DecryptRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.asymmetric_decrypt), "__call__" - ) as call: + with mock.patch.object(type(client.transport.decrypt), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.AsymmetricDecryptResponse() + service.DecryptResponse() ) - await client.asymmetric_decrypt(request) + await client.decrypt(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -10766,20 +11134,18 @@ async def test_asymmetric_decrypt_field_headers_async(): ) in kw["metadata"] -def test_asymmetric_decrypt_flattened(): +def test_decrypt_flattened(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.asymmetric_decrypt), "__call__" - ) as call: + with mock.patch.object(type(client.transport.decrypt), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = service.AsymmetricDecryptResponse() + call.return_value = service.DecryptResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.asymmetric_decrypt( + client.decrypt( name="name_value", ciphertext=b"ciphertext_blob", ) @@ -10796,7 +11162,7 @@ def test_asymmetric_decrypt_flattened(): assert arg == mock_val -def test_asymmetric_decrypt_flattened_error(): +def test_decrypt_flattened_error(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10804,32 +11170,30 @@ def test_asymmetric_decrypt_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.asymmetric_decrypt( - service.AsymmetricDecryptRequest(), + client.decrypt( + service.DecryptRequest(), name="name_value", ciphertext=b"ciphertext_blob", ) @pytest.mark.asyncio -async def test_asymmetric_decrypt_flattened_async(): +async def test_decrypt_flattened_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.asymmetric_decrypt), "__call__" - ) as call: + with mock.patch.object(type(client.transport.decrypt), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = service.AsymmetricDecryptResponse() + call.return_value = service.DecryptResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.AsymmetricDecryptResponse() + service.DecryptResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.asymmetric_decrypt( + response = await client.decrypt( name="name_value", ciphertext=b"ciphertext_blob", ) @@ -10847,7 +11211,7 @@ async def test_asymmetric_decrypt_flattened_async(): @pytest.mark.asyncio -async def test_asymmetric_decrypt_flattened_error_async(): +async def test_decrypt_flattened_error_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) @@ -10855,8 +11219,8 @@ async def test_asymmetric_decrypt_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.asymmetric_decrypt( - service.AsymmetricDecryptRequest(), + await client.decrypt( + service.DecryptRequest(), name="name_value", ciphertext=b"ciphertext_blob", ) @@ -10865,11 +11229,11 @@ async def test_asymmetric_decrypt_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - service.MacSignRequest, + service.RawEncryptRequest, dict, ], ) -def test_mac_sign(request_type, transport: str = "grpc"): +def test_raw_encrypt(request_type, transport: str = "grpc"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10880,31 +11244,39 @@ def test_mac_sign(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.mac_sign), "__call__") as call: + with mock.patch.object(type(client.transport.raw_encrypt), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = service.MacSignResponse( + call.return_value = service.RawEncryptResponse( + ciphertext=b"ciphertext_blob", + initialization_vector=b"initialization_vector_blob", + tag_length=1053, + verified_plaintext_crc32c=True, + verified_additional_authenticated_data_crc32c=True, + verified_initialization_vector_crc32c=True, name="name_value", - mac=b"mac_blob", - verified_data_crc32c=True, protection_level=resources.ProtectionLevel.SOFTWARE, ) - response = client.mac_sign(request) + response = client.raw_encrypt(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.MacSignRequest() + request = service.RawEncryptRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, service.MacSignResponse) + assert isinstance(response, service.RawEncryptResponse) + assert response.ciphertext == b"ciphertext_blob" + assert response.initialization_vector == b"initialization_vector_blob" + assert response.tag_length == 1053 + assert response.verified_plaintext_crc32c is True + assert response.verified_additional_authenticated_data_crc32c is True + assert response.verified_initialization_vector_crc32c is True assert response.name == "name_value" - assert response.mac == b"mac_blob" - assert response.verified_data_crc32c is True assert response.protection_level == resources.ProtectionLevel.SOFTWARE -def test_mac_sign_non_empty_request_with_auto_populated_field(): +def test_raw_encrypt_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = KeyManagementServiceClient( @@ -10915,24 +11287,24 @@ def test_mac_sign_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.MacSignRequest( + request = service.RawEncryptRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.mac_sign), "__call__") as call: + with mock.patch.object(type(client.transport.raw_encrypt), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.mac_sign(request=request) + client.raw_encrypt(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.MacSignRequest( + assert args[0] == service.RawEncryptRequest( name="name_value", ) -def test_mac_sign_use_cached_wrapped_rpc(): +def test_raw_encrypt_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10946,21 +11318,21 @@ def test_mac_sign_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.mac_sign in client._transport._wrapped_methods + assert client._transport.raw_encrypt in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.mac_sign] = mock_rpc + client._transport._wrapped_methods[client._transport.raw_encrypt] = mock_rpc request = {} - client.mac_sign(request) + client.raw_encrypt(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.mac_sign(request) + client.raw_encrypt(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10968,7 +11340,9 @@ def test_mac_sign_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_mac_sign_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_raw_encrypt_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -10983,7 +11357,7 @@ async def test_mac_sign_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn # Ensure method has been cached assert ( - client._client._transport.mac_sign + client._client._transport.raw_encrypt in client._client._transport._wrapped_methods ) @@ -10991,16 +11365,16 @@ async def test_mac_sign_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.mac_sign + client._client._transport.raw_encrypt ] = mock_rpc request = {} - await client.mac_sign(request) + await client.raw_encrypt(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.mac_sign(request) + await client.raw_encrypt(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11008,8 +11382,8 @@ async def test_mac_sign_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn @pytest.mark.asyncio -async def test_mac_sign_async( - transport: str = "grpc_asyncio", request_type=service.MacSignRequest +async def test_raw_encrypt_async( + transport: str = "grpc_asyncio", request_type=service.RawEncryptRequest ): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -11021,52 +11395,60 @@ async def test_mac_sign_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.mac_sign), "__call__") as call: + with mock.patch.object(type(client.transport.raw_encrypt), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.MacSignResponse( + service.RawEncryptResponse( + ciphertext=b"ciphertext_blob", + initialization_vector=b"initialization_vector_blob", + tag_length=1053, + verified_plaintext_crc32c=True, + verified_additional_authenticated_data_crc32c=True, + verified_initialization_vector_crc32c=True, name="name_value", - mac=b"mac_blob", - verified_data_crc32c=True, protection_level=resources.ProtectionLevel.SOFTWARE, ) ) - response = await client.mac_sign(request) + response = await client.raw_encrypt(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.MacSignRequest() + request = service.RawEncryptRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, service.MacSignResponse) + assert isinstance(response, service.RawEncryptResponse) + assert response.ciphertext == b"ciphertext_blob" + assert response.initialization_vector == b"initialization_vector_blob" + assert response.tag_length == 1053 + assert response.verified_plaintext_crc32c is True + assert response.verified_additional_authenticated_data_crc32c is True + assert response.verified_initialization_vector_crc32c is True assert response.name == "name_value" - assert response.mac == b"mac_blob" - assert response.verified_data_crc32c is True assert response.protection_level == resources.ProtectionLevel.SOFTWARE @pytest.mark.asyncio -async def test_mac_sign_async_from_dict(): - await test_mac_sign_async(request_type=dict) +async def test_raw_encrypt_async_from_dict(): + await test_raw_encrypt_async(request_type=dict) -def test_mac_sign_field_headers(): +def test_raw_encrypt_field_headers(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.MacSignRequest() + request = service.RawEncryptRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.mac_sign), "__call__") as call: - call.return_value = service.MacSignResponse() - client.mac_sign(request) + with mock.patch.object(type(client.transport.raw_encrypt), "__call__") as call: + call.return_value = service.RawEncryptResponse() + client.raw_encrypt(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -11082,23 +11464,23 @@ def test_mac_sign_field_headers(): @pytest.mark.asyncio -async def test_mac_sign_field_headers_async(): +async def test_raw_encrypt_field_headers_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.MacSignRequest() + request = service.RawEncryptRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.mac_sign), "__call__") as call: + with mock.patch.object(type(client.transport.raw_encrypt), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.MacSignResponse() + service.RawEncryptResponse() ) - await client.mac_sign(request) + await client.raw_encrypt(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11113,106 +11495,14 @@ async def test_mac_sign_field_headers_async(): ) in kw["metadata"] -def test_mac_sign_flattened(): - client = KeyManagementServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.mac_sign), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = service.MacSignResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.mac_sign( - name="name_value", - data=b"data_blob", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - arg = args[0].data - mock_val = b"data_blob" - assert arg == mock_val - - -def test_mac_sign_flattened_error(): - client = KeyManagementServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.mac_sign( - service.MacSignRequest(), - name="name_value", - data=b"data_blob", - ) - - -@pytest.mark.asyncio -async def test_mac_sign_flattened_async(): - client = KeyManagementServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.mac_sign), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = service.MacSignResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.MacSignResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.mac_sign( - name="name_value", - data=b"data_blob", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - arg = args[0].data - mock_val = b"data_blob" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_mac_sign_flattened_error_async(): - client = KeyManagementServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.mac_sign( - service.MacSignRequest(), - name="name_value", - data=b"data_blob", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - service.MacVerifyRequest, - dict, - ], -) -def test_mac_verify(request_type, transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + service.RawDecryptRequest, + dict, + ], +) +def test_raw_decrypt(request_type, transport: str = "grpc"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11223,35 +11513,33 @@ def test_mac_verify(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.mac_verify), "__call__") as call: + with mock.patch.object(type(client.transport.raw_decrypt), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = service.MacVerifyResponse( - name="name_value", - success=True, - verified_data_crc32c=True, - verified_mac_crc32c=True, - verified_success_integrity=True, + call.return_value = service.RawDecryptResponse( + plaintext=b"plaintext_blob", protection_level=resources.ProtectionLevel.SOFTWARE, + verified_ciphertext_crc32c=True, + verified_additional_authenticated_data_crc32c=True, + verified_initialization_vector_crc32c=True, ) - response = client.mac_verify(request) + response = client.raw_decrypt(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.MacVerifyRequest() + request = service.RawDecryptRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, service.MacVerifyResponse) - assert response.name == "name_value" - assert response.success is True - assert response.verified_data_crc32c is True - assert response.verified_mac_crc32c is True - assert response.verified_success_integrity is True + assert isinstance(response, service.RawDecryptResponse) + assert response.plaintext == b"plaintext_blob" assert response.protection_level == resources.ProtectionLevel.SOFTWARE + assert response.verified_ciphertext_crc32c is True + assert response.verified_additional_authenticated_data_crc32c is True + assert response.verified_initialization_vector_crc32c is True -def test_mac_verify_non_empty_request_with_auto_populated_field(): +def test_raw_decrypt_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = KeyManagementServiceClient( @@ -11262,24 +11550,24 @@ def test_mac_verify_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.MacVerifyRequest( + request = service.RawDecryptRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.mac_verify), "__call__") as call: + with mock.patch.object(type(client.transport.raw_decrypt), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.mac_verify(request=request) + client.raw_decrypt(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.MacVerifyRequest( + assert args[0] == service.RawDecryptRequest( name="name_value", ) -def test_mac_verify_use_cached_wrapped_rpc(): +def test_raw_decrypt_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11293,21 +11581,21 @@ def test_mac_verify_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.mac_verify in client._transport._wrapped_methods + assert client._transport.raw_decrypt in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.mac_verify] = mock_rpc + client._transport._wrapped_methods[client._transport.raw_decrypt] = mock_rpc request = {} - client.mac_verify(request) + client.raw_decrypt(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.mac_verify(request) + client.raw_decrypt(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11315,7 +11603,9 @@ def test_mac_verify_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_mac_verify_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_raw_decrypt_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -11330,7 +11620,7 @@ async def test_mac_verify_async_use_cached_wrapped_rpc(transport: str = "grpc_as # Ensure method has been cached assert ( - client._client._transport.mac_verify + client._client._transport.raw_decrypt in client._client._transport._wrapped_methods ) @@ -11338,16 +11628,16 @@ async def test_mac_verify_async_use_cached_wrapped_rpc(transport: str = "grpc_as mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.mac_verify + client._client._transport.raw_decrypt ] = mock_rpc request = {} - await client.mac_verify(request) + await client.raw_decrypt(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.mac_verify(request) + await client.raw_decrypt(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11355,8 +11645,8 @@ async def test_mac_verify_async_use_cached_wrapped_rpc(transport: str = "grpc_as @pytest.mark.asyncio -async def test_mac_verify_async( - transport: str = "grpc_asyncio", request_type=service.MacVerifyRequest +async def test_raw_decrypt_async( + transport: str = "grpc_asyncio", request_type=service.RawDecryptRequest ): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -11368,56 +11658,54 @@ async def test_mac_verify_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.mac_verify), "__call__") as call: + with mock.patch.object(type(client.transport.raw_decrypt), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.MacVerifyResponse( - name="name_value", - success=True, - verified_data_crc32c=True, - verified_mac_crc32c=True, - verified_success_integrity=True, + service.RawDecryptResponse( + plaintext=b"plaintext_blob", protection_level=resources.ProtectionLevel.SOFTWARE, + verified_ciphertext_crc32c=True, + verified_additional_authenticated_data_crc32c=True, + verified_initialization_vector_crc32c=True, ) ) - response = await client.mac_verify(request) + response = await client.raw_decrypt(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.MacVerifyRequest() + request = service.RawDecryptRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, service.MacVerifyResponse) - assert response.name == "name_value" - assert response.success is True - assert response.verified_data_crc32c is True - assert response.verified_mac_crc32c is True - assert response.verified_success_integrity is True + assert isinstance(response, service.RawDecryptResponse) + assert response.plaintext == b"plaintext_blob" assert response.protection_level == resources.ProtectionLevel.SOFTWARE + assert response.verified_ciphertext_crc32c is True + assert response.verified_additional_authenticated_data_crc32c is True + assert response.verified_initialization_vector_crc32c is True @pytest.mark.asyncio -async def test_mac_verify_async_from_dict(): - await test_mac_verify_async(request_type=dict) +async def test_raw_decrypt_async_from_dict(): + await test_raw_decrypt_async(request_type=dict) -def test_mac_verify_field_headers(): +def test_raw_decrypt_field_headers(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.MacVerifyRequest() + request = service.RawDecryptRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.mac_verify), "__call__") as call: - call.return_value = service.MacVerifyResponse() - client.mac_verify(request) + with mock.patch.object(type(client.transport.raw_decrypt), "__call__") as call: + call.return_value = service.RawDecryptResponse() + client.raw_decrypt(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -11433,23 +11721,23 @@ def test_mac_verify_field_headers(): @pytest.mark.asyncio -async def test_mac_verify_field_headers_async(): +async def test_raw_decrypt_field_headers_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.MacVerifyRequest() + request = service.RawDecryptRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.mac_verify), "__call__") as call: + with mock.patch.object(type(client.transport.raw_decrypt), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.MacVerifyResponse() + service.RawDecryptResponse() ) - await client.mac_verify(request) + await client.raw_decrypt(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11464,153 +11752,51 @@ async def test_mac_verify_field_headers_async(): ) in kw["metadata"] -def test_mac_verify_flattened(): +@pytest.mark.parametrize( + "request_type", + [ + service.AsymmetricSignRequest, + dict, + ], +) +def test_asymmetric_sign(request_type, transport: str = "grpc"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.mac_verify), "__call__") as call: + with mock.patch.object(type(client.transport.asymmetric_sign), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = service.MacVerifyResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.mac_verify( + call.return_value = service.AsymmetricSignResponse( + signature=b"signature_blob", + verified_digest_crc32c=True, name="name_value", - data=b"data_blob", - mac=b"mac_blob", + verified_data_crc32c=True, + protection_level=resources.ProtectionLevel.SOFTWARE, ) + response = client.asymmetric_sign(request) - # Establish that the underlying call was made with the expected - # request object values. + # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - arg = args[0].data - mock_val = b"data_blob" - assert arg == mock_val - arg = args[0].mac - mock_val = b"mac_blob" - assert arg == mock_val - - -def test_mac_verify_flattened_error(): - client = KeyManagementServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.mac_verify( - service.MacVerifyRequest(), - name="name_value", - data=b"data_blob", - mac=b"mac_blob", - ) - - -@pytest.mark.asyncio -async def test_mac_verify_flattened_async(): - client = KeyManagementServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.mac_verify), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = service.MacVerifyResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.MacVerifyResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.mac_verify( - name="name_value", - data=b"data_blob", - mac=b"mac_blob", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - arg = args[0].data - mock_val = b"data_blob" - assert arg == mock_val - arg = args[0].mac - mock_val = b"mac_blob" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_mac_verify_flattened_error_async(): - client = KeyManagementServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.mac_verify( - service.MacVerifyRequest(), - name="name_value", - data=b"data_blob", - mac=b"mac_blob", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - service.DecapsulateRequest, - dict, - ], -) -def test_decapsulate(request_type, transport: str = "grpc"): - client = KeyManagementServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.decapsulate), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = service.DecapsulateResponse( - name="name_value", - shared_secret=b"shared_secret_blob", - shared_secret_crc32c=1979, - verified_ciphertext_crc32c=True, - protection_level=resources.ProtectionLevel.SOFTWARE, - ) - response = client.decapsulate(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.DecapsulateRequest() + request = service.AsymmetricSignRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, service.DecapsulateResponse) + assert isinstance(response, service.AsymmetricSignResponse) + assert response.signature == b"signature_blob" + assert response.verified_digest_crc32c is True assert response.name == "name_value" - assert response.shared_secret == b"shared_secret_blob" - assert response.shared_secret_crc32c == 1979 - assert response.verified_ciphertext_crc32c is True + assert response.verified_data_crc32c is True assert response.protection_level == resources.ProtectionLevel.SOFTWARE -def test_decapsulate_non_empty_request_with_auto_populated_field(): +def test_asymmetric_sign_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = KeyManagementServiceClient( @@ -11621,24 +11807,24 @@ def test_decapsulate_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.DecapsulateRequest( + request = service.AsymmetricSignRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.decapsulate), "__call__") as call: + with mock.patch.object(type(client.transport.asymmetric_sign), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.decapsulate(request=request) + client.asymmetric_sign(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.DecapsulateRequest( + assert args[0] == service.AsymmetricSignRequest( name="name_value", ) -def test_decapsulate_use_cached_wrapped_rpc(): +def test_asymmetric_sign_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11652,21 +11838,21 @@ def test_decapsulate_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.decapsulate in client._transport._wrapped_methods + assert client._transport.asymmetric_sign in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.decapsulate] = mock_rpc + client._transport._wrapped_methods[client._transport.asymmetric_sign] = mock_rpc request = {} - client.decapsulate(request) + client.asymmetric_sign(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.decapsulate(request) + client.asymmetric_sign(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11674,7 +11860,7 @@ def test_decapsulate_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_decapsulate_async_use_cached_wrapped_rpc( +async def test_asymmetric_sign_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11691,7 +11877,7 @@ async def test_decapsulate_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.decapsulate + client._client._transport.asymmetric_sign in client._client._transport._wrapped_methods ) @@ -11699,16 +11885,16 @@ async def test_decapsulate_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.decapsulate + client._client._transport.asymmetric_sign ] = mock_rpc request = {} - await client.decapsulate(request) + await client.asymmetric_sign(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.decapsulate(request) + await client.asymmetric_sign(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11716,8 +11902,8 @@ async def test_decapsulate_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_decapsulate_async( - transport: str = "grpc_asyncio", request_type=service.DecapsulateRequest +async def test_asymmetric_sign_async( + transport: str = "grpc_asyncio", request_type=service.AsymmetricSignRequest ): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -11729,54 +11915,54 @@ async def test_decapsulate_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.decapsulate), "__call__") as call: + with mock.patch.object(type(client.transport.asymmetric_sign), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.DecapsulateResponse( + service.AsymmetricSignResponse( + signature=b"signature_blob", + verified_digest_crc32c=True, name="name_value", - shared_secret=b"shared_secret_blob", - shared_secret_crc32c=1979, - verified_ciphertext_crc32c=True, + verified_data_crc32c=True, protection_level=resources.ProtectionLevel.SOFTWARE, ) ) - response = await client.decapsulate(request) + response = await client.asymmetric_sign(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.DecapsulateRequest() + request = service.AsymmetricSignRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, service.DecapsulateResponse) + assert isinstance(response, service.AsymmetricSignResponse) + assert response.signature == b"signature_blob" + assert response.verified_digest_crc32c is True assert response.name == "name_value" - assert response.shared_secret == b"shared_secret_blob" - assert response.shared_secret_crc32c == 1979 - assert response.verified_ciphertext_crc32c is True + assert response.verified_data_crc32c is True assert response.protection_level == resources.ProtectionLevel.SOFTWARE @pytest.mark.asyncio -async def test_decapsulate_async_from_dict(): - await test_decapsulate_async(request_type=dict) +async def test_asymmetric_sign_async_from_dict(): + await test_asymmetric_sign_async(request_type=dict) -def test_decapsulate_field_headers(): +def test_asymmetric_sign_field_headers(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.DecapsulateRequest() + request = service.AsymmetricSignRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.decapsulate), "__call__") as call: - call.return_value = service.DecapsulateResponse() - client.decapsulate(request) + with mock.patch.object(type(client.transport.asymmetric_sign), "__call__") as call: + call.return_value = service.AsymmetricSignResponse() + client.asymmetric_sign(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -11792,23 +11978,23 @@ def test_decapsulate_field_headers(): @pytest.mark.asyncio -async def test_decapsulate_field_headers_async(): +async def test_asymmetric_sign_field_headers_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.DecapsulateRequest() + request = service.AsymmetricSignRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.decapsulate), "__call__") as call: + with mock.patch.object(type(client.transport.asymmetric_sign), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.DecapsulateResponse() + service.AsymmetricSignResponse() ) - await client.decapsulate(request) + await client.asymmetric_sign(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11823,75 +12009,171 @@ async def test_decapsulate_field_headers_async(): ) in kw["metadata"] -@pytest.mark.parametrize( - "request_type", - [ - service.GenerateRandomBytesRequest, - dict, - ], -) -def test_generate_random_bytes(request_type, transport: str = "grpc"): +def test_asymmetric_sign_flattened(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.generate_random_bytes), "__call__" - ) as call: + with mock.patch.object(type(client.transport.asymmetric_sign), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = service.GenerateRandomBytesResponse( - data=b"data_blob", + call.return_value = service.AsymmetricSignResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.asymmetric_sign( + name="name_value", + digest=service.Digest(sha256=b"sha256_blob"), ) - response = client.generate_random_bytes(request) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying call was made with the expected + # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.GenerateRandomBytesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, service.GenerateRandomBytesResponse) - assert response.data == b"data_blob" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].digest + mock_val = service.Digest(sha256=b"sha256_blob") + assert arg == mock_val -def test_generate_random_bytes_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. +def test_asymmetric_sign_flattened_error(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", ) - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.GenerateRandomBytesRequest( - location="location_value", - ) + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.asymmetric_sign( + service.AsymmetricSignRequest(), + name="name_value", + digest=service.Digest(sha256=b"sha256_blob"), + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.generate_random_bytes), "__call__" + +@pytest.mark.asyncio +async def test_asymmetric_sign_flattened_async(): + client = KeyManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.asymmetric_sign), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.AsymmetricSignResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.AsymmetricSignResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.asymmetric_sign( + name="name_value", + digest=service.Digest(sha256=b"sha256_blob"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].digest + mock_val = service.Digest(sha256=b"sha256_blob") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_asymmetric_sign_flattened_error_async(): + client = KeyManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.asymmetric_sign( + service.AsymmetricSignRequest(), + name="name_value", + digest=service.Digest(sha256=b"sha256_blob"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.AsymmetricDecryptRequest, + dict, + ], +) +def test_asymmetric_decrypt(request_type, transport: str = "grpc"): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.asymmetric_decrypt), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.AsymmetricDecryptResponse( + plaintext=b"plaintext_blob", + verified_ciphertext_crc32c=True, + protection_level=resources.ProtectionLevel.SOFTWARE, + ) + response = client.asymmetric_decrypt(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.AsymmetricDecryptRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, service.AsymmetricDecryptResponse) + assert response.plaintext == b"plaintext_blob" + assert response.verified_ciphertext_crc32c is True + assert response.protection_level == resources.ProtectionLevel.SOFTWARE + + +def test_asymmetric_decrypt_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.AsymmetricDecryptRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.asymmetric_decrypt), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.generate_random_bytes(request=request) + client.asymmetric_decrypt(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.GenerateRandomBytesRequest( - location="location_value", + assert args[0] == service.AsymmetricDecryptRequest( + name="name_value", ) -def test_generate_random_bytes_use_cached_wrapped_rpc(): +def test_asymmetric_decrypt_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11906,8 +12188,7 @@ def test_generate_random_bytes_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.generate_random_bytes - in client._transport._wrapped_methods + client._transport.asymmetric_decrypt in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -11916,15 +12197,15 @@ def test_generate_random_bytes_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.generate_random_bytes + client._transport.asymmetric_decrypt ] = mock_rpc request = {} - client.generate_random_bytes(request) + client.asymmetric_decrypt(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.generate_random_bytes(request) + client.asymmetric_decrypt(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11932,7 +12213,7 @@ def test_generate_random_bytes_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_generate_random_bytes_async_use_cached_wrapped_rpc( +async def test_asymmetric_decrypt_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11949,7 +12230,7 @@ async def test_generate_random_bytes_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.generate_random_bytes + client._client._transport.asymmetric_decrypt in client._client._transport._wrapped_methods ) @@ -11957,16 +12238,16 @@ async def test_generate_random_bytes_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.generate_random_bytes + client._client._transport.asymmetric_decrypt ] = mock_rpc request = {} - await client.generate_random_bytes(request) + await client.asymmetric_decrypt(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.generate_random_bytes(request) + await client.asymmetric_decrypt(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11974,8 +12255,8 @@ async def test_generate_random_bytes_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_generate_random_bytes_async( - transport: str = "grpc_asyncio", request_type=service.GenerateRandomBytesRequest +async def test_asymmetric_decrypt_async( + transport: str = "grpc_asyncio", request_type=service.AsymmetricDecryptRequest ): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -11988,49 +12269,53 @@ async def test_generate_random_bytes_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.generate_random_bytes), "__call__" + type(client.transport.asymmetric_decrypt), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.GenerateRandomBytesResponse( - data=b"data_blob", + service.AsymmetricDecryptResponse( + plaintext=b"plaintext_blob", + verified_ciphertext_crc32c=True, + protection_level=resources.ProtectionLevel.SOFTWARE, ) ) - response = await client.generate_random_bytes(request) + response = await client.asymmetric_decrypt(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.GenerateRandomBytesRequest() + request = service.AsymmetricDecryptRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, service.GenerateRandomBytesResponse) - assert response.data == b"data_blob" + assert isinstance(response, service.AsymmetricDecryptResponse) + assert response.plaintext == b"plaintext_blob" + assert response.verified_ciphertext_crc32c is True + assert response.protection_level == resources.ProtectionLevel.SOFTWARE @pytest.mark.asyncio -async def test_generate_random_bytes_async_from_dict(): - await test_generate_random_bytes_async(request_type=dict) +async def test_asymmetric_decrypt_async_from_dict(): + await test_asymmetric_decrypt_async(request_type=dict) -def test_generate_random_bytes_field_headers(): +def test_asymmetric_decrypt_field_headers(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GenerateRandomBytesRequest() + request = service.AsymmetricDecryptRequest() - request.location = "location_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.generate_random_bytes), "__call__" + type(client.transport.asymmetric_decrypt), "__call__" ) as call: - call.return_value = service.GenerateRandomBytesResponse() - client.generate_random_bytes(request) + call.return_value = service.AsymmetricDecryptResponse() + client.asymmetric_decrypt(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -12041,30 +12326,30 @@ def test_generate_random_bytes_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "location=location_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_generate_random_bytes_field_headers_async(): +async def test_asymmetric_decrypt_field_headers_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GenerateRandomBytesRequest() + request = service.AsymmetricDecryptRequest() - request.location = "location_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.generate_random_bytes), "__call__" + type(client.transport.asymmetric_decrypt), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.GenerateRandomBytesResponse() + service.AsymmetricDecryptResponse() ) - await client.generate_random_bytes(request) + await client.asymmetric_decrypt(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -12075,45 +12360,41 @@ async def test_generate_random_bytes_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "location=location_value", + "name=name_value", ) in kw["metadata"] -def test_generate_random_bytes_flattened(): +def test_asymmetric_decrypt_flattened(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.generate_random_bytes), "__call__" + type(client.transport.asymmetric_decrypt), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = service.GenerateRandomBytesResponse() + call.return_value = service.AsymmetricDecryptResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.generate_random_bytes( - location="location_value", - length_bytes=1288, - protection_level=resources.ProtectionLevel.SOFTWARE, + client.asymmetric_decrypt( + name="name_value", + ciphertext=b"ciphertext_blob", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].location - mock_val = "location_value" - assert arg == mock_val - arg = args[0].length_bytes - mock_val = 1288 + arg = args[0].name + mock_val = "name_value" assert arg == mock_val - arg = args[0].protection_level - mock_val = resources.ProtectionLevel.SOFTWARE + arg = args[0].ciphertext + mock_val = b"ciphertext_blob" assert arg == mock_val -def test_generate_random_bytes_flattened_error(): +def test_asymmetric_decrypt_flattened_error(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12121,55 +12402,50 @@ def test_generate_random_bytes_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.generate_random_bytes( - service.GenerateRandomBytesRequest(), - location="location_value", - length_bytes=1288, - protection_level=resources.ProtectionLevel.SOFTWARE, + client.asymmetric_decrypt( + service.AsymmetricDecryptRequest(), + name="name_value", + ciphertext=b"ciphertext_blob", ) @pytest.mark.asyncio -async def test_generate_random_bytes_flattened_async(): +async def test_asymmetric_decrypt_flattened_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.generate_random_bytes), "__call__" + type(client.transport.asymmetric_decrypt), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = service.GenerateRandomBytesResponse() + call.return_value = service.AsymmetricDecryptResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.GenerateRandomBytesResponse() + service.AsymmetricDecryptResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.generate_random_bytes( - location="location_value", - length_bytes=1288, - protection_level=resources.ProtectionLevel.SOFTWARE, + response = await client.asymmetric_decrypt( + name="name_value", + ciphertext=b"ciphertext_blob", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].location - mock_val = "location_value" - assert arg == mock_val - arg = args[0].length_bytes - mock_val = 1288 + arg = args[0].name + mock_val = "name_value" assert arg == mock_val - arg = args[0].protection_level - mock_val = resources.ProtectionLevel.SOFTWARE + arg = args[0].ciphertext + mock_val = b"ciphertext_blob" assert arg == mock_val @pytest.mark.asyncio -async def test_generate_random_bytes_flattened_error_async(): +async def test_asymmetric_decrypt_flattened_error_async(): client = KeyManagementServiceAsyncClient( credentials=async_anonymous_credentials(), ) @@ -12177,15 +12453,2393 @@ async def test_generate_random_bytes_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.generate_random_bytes( - service.GenerateRandomBytesRequest(), - location="location_value", - length_bytes=1288, + await client.asymmetric_decrypt( + service.AsymmetricDecryptRequest(), + name="name_value", + ciphertext=b"ciphertext_blob", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.MacSignRequest, + dict, + ], +) +def test_mac_sign(request_type, transport: str = "grpc"): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.mac_sign), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.MacSignResponse( + name="name_value", + mac=b"mac_blob", + verified_data_crc32c=True, protection_level=resources.ProtectionLevel.SOFTWARE, ) + response = client.mac_sign(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.MacSignRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, service.MacSignResponse) + assert response.name == "name_value" + assert response.mac == b"mac_blob" + assert response.verified_data_crc32c is True + assert response.protection_level == resources.ProtectionLevel.SOFTWARE + + +def test_mac_sign_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.MacSignRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.mac_sign), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.mac_sign(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.MacSignRequest( + name="name_value", + ) + + +def test_mac_sign_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.mac_sign in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.mac_sign] = mock_rpc + request = {} + client.mac_sign(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.mac_sign(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_mac_sign_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = KeyManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.mac_sign + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.mac_sign + ] = mock_rpc + + request = {} + await client.mac_sign(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.mac_sign(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_mac_sign_async( + transport: str = "grpc_asyncio", request_type=service.MacSignRequest +): + client = KeyManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.mac_sign), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.MacSignResponse( + name="name_value", + mac=b"mac_blob", + verified_data_crc32c=True, + protection_level=resources.ProtectionLevel.SOFTWARE, + ) + ) + response = await client.mac_sign(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.MacSignRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, service.MacSignResponse) + assert response.name == "name_value" + assert response.mac == b"mac_blob" + assert response.verified_data_crc32c is True + assert response.protection_level == resources.ProtectionLevel.SOFTWARE + + +@pytest.mark.asyncio +async def test_mac_sign_async_from_dict(): + await test_mac_sign_async(request_type=dict) + + +def test_mac_sign_field_headers(): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.MacSignRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.mac_sign), "__call__") as call: + call.return_value = service.MacSignResponse() + client.mac_sign(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_mac_sign_field_headers_async(): + client = KeyManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.MacSignRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.mac_sign), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.MacSignResponse() + ) + await client.mac_sign(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_mac_sign_flattened(): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.mac_sign), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.MacSignResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.mac_sign( + name="name_value", + data=b"data_blob", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].data + mock_val = b"data_blob" + assert arg == mock_val + + +def test_mac_sign_flattened_error(): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.mac_sign( + service.MacSignRequest(), + name="name_value", + data=b"data_blob", + ) + + +@pytest.mark.asyncio +async def test_mac_sign_flattened_async(): + client = KeyManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.mac_sign), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.MacSignResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.MacSignResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.mac_sign( + name="name_value", + data=b"data_blob", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].data + mock_val = b"data_blob" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_mac_sign_flattened_error_async(): + client = KeyManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.mac_sign( + service.MacSignRequest(), + name="name_value", + data=b"data_blob", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.MacVerifyRequest, + dict, + ], +) +def test_mac_verify(request_type, transport: str = "grpc"): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.mac_verify), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.MacVerifyResponse( + name="name_value", + success=True, + verified_data_crc32c=True, + verified_mac_crc32c=True, + verified_success_integrity=True, + protection_level=resources.ProtectionLevel.SOFTWARE, + ) + response = client.mac_verify(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.MacVerifyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, service.MacVerifyResponse) + assert response.name == "name_value" + assert response.success is True + assert response.verified_data_crc32c is True + assert response.verified_mac_crc32c is True + assert response.verified_success_integrity is True + assert response.protection_level == resources.ProtectionLevel.SOFTWARE + + +def test_mac_verify_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.MacVerifyRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.mac_verify), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.mac_verify(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.MacVerifyRequest( + name="name_value", + ) + + +def test_mac_verify_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.mac_verify in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.mac_verify] = mock_rpc + request = {} + client.mac_verify(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.mac_verify(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_mac_verify_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = KeyManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.mac_verify + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.mac_verify + ] = mock_rpc + + request = {} + await client.mac_verify(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.mac_verify(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_mac_verify_async( + transport: str = "grpc_asyncio", request_type=service.MacVerifyRequest +): + client = KeyManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.mac_verify), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.MacVerifyResponse( + name="name_value", + success=True, + verified_data_crc32c=True, + verified_mac_crc32c=True, + verified_success_integrity=True, + protection_level=resources.ProtectionLevel.SOFTWARE, + ) + ) + response = await client.mac_verify(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.MacVerifyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, service.MacVerifyResponse) + assert response.name == "name_value" + assert response.success is True + assert response.verified_data_crc32c is True + assert response.verified_mac_crc32c is True + assert response.verified_success_integrity is True + assert response.protection_level == resources.ProtectionLevel.SOFTWARE + + +@pytest.mark.asyncio +async def test_mac_verify_async_from_dict(): + await test_mac_verify_async(request_type=dict) + + +def test_mac_verify_field_headers(): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.MacVerifyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.mac_verify), "__call__") as call: + call.return_value = service.MacVerifyResponse() + client.mac_verify(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_mac_verify_field_headers_async(): + client = KeyManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.MacVerifyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.mac_verify), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.MacVerifyResponse() + ) + await client.mac_verify(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_mac_verify_flattened(): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.mac_verify), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.MacVerifyResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.mac_verify( + name="name_value", + data=b"data_blob", + mac=b"mac_blob", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].data + mock_val = b"data_blob" + assert arg == mock_val + arg = args[0].mac + mock_val = b"mac_blob" + assert arg == mock_val + + +def test_mac_verify_flattened_error(): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.mac_verify( + service.MacVerifyRequest(), + name="name_value", + data=b"data_blob", + mac=b"mac_blob", + ) + + +@pytest.mark.asyncio +async def test_mac_verify_flattened_async(): + client = KeyManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.mac_verify), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.MacVerifyResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.MacVerifyResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.mac_verify( + name="name_value", + data=b"data_blob", + mac=b"mac_blob", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].data + mock_val = b"data_blob" + assert arg == mock_val + arg = args[0].mac + mock_val = b"mac_blob" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_mac_verify_flattened_error_async(): + client = KeyManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.mac_verify( + service.MacVerifyRequest(), + name="name_value", + data=b"data_blob", + mac=b"mac_blob", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.DecapsulateRequest, + dict, + ], +) +def test_decapsulate(request_type, transport: str = "grpc"): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.decapsulate), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.DecapsulateResponse( + name="name_value", + shared_secret=b"shared_secret_blob", + shared_secret_crc32c=1979, + verified_ciphertext_crc32c=True, + protection_level=resources.ProtectionLevel.SOFTWARE, + ) + response = client.decapsulate(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.DecapsulateRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, service.DecapsulateResponse) + assert response.name == "name_value" + assert response.shared_secret == b"shared_secret_blob" + assert response.shared_secret_crc32c == 1979 + assert response.verified_ciphertext_crc32c is True + assert response.protection_level == resources.ProtectionLevel.SOFTWARE + + +def test_decapsulate_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.DecapsulateRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.decapsulate), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.decapsulate(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DecapsulateRequest( + name="name_value", + ) + + +def test_decapsulate_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.decapsulate in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.decapsulate] = mock_rpc + request = {} + client.decapsulate(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.decapsulate(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_decapsulate_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = KeyManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.decapsulate + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.decapsulate + ] = mock_rpc + + request = {} + await client.decapsulate(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.decapsulate(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_decapsulate_async( + transport: str = "grpc_asyncio", request_type=service.DecapsulateRequest +): + client = KeyManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.decapsulate), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.DecapsulateResponse( + name="name_value", + shared_secret=b"shared_secret_blob", + shared_secret_crc32c=1979, + verified_ciphertext_crc32c=True, + protection_level=resources.ProtectionLevel.SOFTWARE, + ) + ) + response = await client.decapsulate(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.DecapsulateRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, service.DecapsulateResponse) + assert response.name == "name_value" + assert response.shared_secret == b"shared_secret_blob" + assert response.shared_secret_crc32c == 1979 + assert response.verified_ciphertext_crc32c is True + assert response.protection_level == resources.ProtectionLevel.SOFTWARE + + +@pytest.mark.asyncio +async def test_decapsulate_async_from_dict(): + await test_decapsulate_async(request_type=dict) + + +def test_decapsulate_field_headers(): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DecapsulateRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.decapsulate), "__call__") as call: + call.return_value = service.DecapsulateResponse() + client.decapsulate(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_decapsulate_field_headers_async(): + client = KeyManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DecapsulateRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.decapsulate), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.DecapsulateResponse() + ) + await client.decapsulate(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + service.GenerateRandomBytesRequest, + dict, + ], +) +def test_generate_random_bytes(request_type, transport: str = "grpc"): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_random_bytes), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.GenerateRandomBytesResponse( + data=b"data_blob", + ) + response = client.generate_random_bytes(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.GenerateRandomBytesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, service.GenerateRandomBytesResponse) + assert response.data == b"data_blob" + + +def test_generate_random_bytes_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.GenerateRandomBytesRequest( + location="location_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_random_bytes), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.generate_random_bytes(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GenerateRandomBytesRequest( + location="location_value", + ) + + +def test_generate_random_bytes_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.generate_random_bytes + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.generate_random_bytes + ] = mock_rpc + request = {} + client.generate_random_bytes(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.generate_random_bytes(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_generate_random_bytes_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = KeyManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.generate_random_bytes + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.generate_random_bytes + ] = mock_rpc + + request = {} + await client.generate_random_bytes(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.generate_random_bytes(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_generate_random_bytes_async( + transport: str = "grpc_asyncio", request_type=service.GenerateRandomBytesRequest +): + client = KeyManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_random_bytes), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.GenerateRandomBytesResponse( + data=b"data_blob", + ) + ) + response = await client.generate_random_bytes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.GenerateRandomBytesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, service.GenerateRandomBytesResponse) + assert response.data == b"data_blob" + + +@pytest.mark.asyncio +async def test_generate_random_bytes_async_from_dict(): + await test_generate_random_bytes_async(request_type=dict) + + +def test_generate_random_bytes_field_headers(): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GenerateRandomBytesRequest() + + request.location = "location_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_random_bytes), "__call__" + ) as call: + call.return_value = service.GenerateRandomBytesResponse() + client.generate_random_bytes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "location=location_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_generate_random_bytes_field_headers_async(): + client = KeyManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GenerateRandomBytesRequest() + + request.location = "location_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_random_bytes), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.GenerateRandomBytesResponse() + ) + await client.generate_random_bytes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "location=location_value", + ) in kw["metadata"] + + +def test_generate_random_bytes_flattened(): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_random_bytes), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.GenerateRandomBytesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.generate_random_bytes( + location="location_value", + length_bytes=1288, + protection_level=resources.ProtectionLevel.SOFTWARE, + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].location + mock_val = "location_value" + assert arg == mock_val + arg = args[0].length_bytes + mock_val = 1288 + assert arg == mock_val + arg = args[0].protection_level + mock_val = resources.ProtectionLevel.SOFTWARE + assert arg == mock_val + + +def test_generate_random_bytes_flattened_error(): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.generate_random_bytes( + service.GenerateRandomBytesRequest(), + location="location_value", + length_bytes=1288, + protection_level=resources.ProtectionLevel.SOFTWARE, + ) + + +@pytest.mark.asyncio +async def test_generate_random_bytes_flattened_async(): + client = KeyManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_random_bytes), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.GenerateRandomBytesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.GenerateRandomBytesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.generate_random_bytes( + location="location_value", + length_bytes=1288, + protection_level=resources.ProtectionLevel.SOFTWARE, + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].location + mock_val = "location_value" + assert arg == mock_val + arg = args[0].length_bytes + mock_val = 1288 + assert arg == mock_val + arg = args[0].protection_level + mock_val = resources.ProtectionLevel.SOFTWARE + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_generate_random_bytes_flattened_error_async(): + client = KeyManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.generate_random_bytes( + service.GenerateRandomBytesRequest(), + location="location_value", + length_bytes=1288, + protection_level=resources.ProtectionLevel.SOFTWARE, + ) + + +def test_list_key_rings_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_key_rings in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_key_rings] = mock_rpc + + request = {} + client.list_key_rings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_key_rings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_key_rings_rest_required_fields(request_type=service.ListKeyRingsRequest): + transport_class = transports.KeyManagementServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_key_rings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_key_rings._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListKeyRingsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListKeyRingsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_key_rings(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_key_rings_rest_unset_required_fields(): + transport = transports.KeyManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_key_rings._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_key_rings_rest_flattened(): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = service.ListKeyRingsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListKeyRingsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_key_rings(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/keyRings" % client.transport._host, + args[1], + ) + + +def test_list_key_rings_rest_flattened_error(transport: str = "rest"): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_key_rings( + service.ListKeyRingsRequest(), + parent="parent_value", + ) + + +def test_list_key_rings_rest_pager(transport: str = "rest"): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListKeyRingsResponse( + key_rings=[ + resources.KeyRing(), + resources.KeyRing(), + resources.KeyRing(), + ], + next_page_token="abc", + ), + service.ListKeyRingsResponse( + key_rings=[], + next_page_token="def", + ), + service.ListKeyRingsResponse( + key_rings=[ + resources.KeyRing(), + ], + next_page_token="ghi", + ), + service.ListKeyRingsResponse( + key_rings=[ + resources.KeyRing(), + resources.KeyRing(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListKeyRingsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_key_rings(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.KeyRing) for i in results) + + pages = list(client.list_key_rings(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_list_crypto_keys_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_crypto_keys in 
client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_crypto_keys + ] = mock_rpc + + request = {} + client.list_crypto_keys(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_crypto_keys(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_crypto_keys_rest_required_fields( + request_type=service.ListCryptoKeysRequest, +): + transport_class = transports.KeyManagementServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_crypto_keys._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_crypto_keys._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + "version_view", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListCryptoKeysResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListCryptoKeysResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_crypto_keys(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_crypto_keys_rest_unset_required_fields(): + transport = transports.KeyManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_crypto_keys._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + "versionView", + ) + ) + & set(("parent",)) + ) + + +def test_list_crypto_keys_rest_flattened(): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = service.ListCryptoKeysResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/keyRings/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListCryptoKeysResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_crypto_keys(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/keyRings/*}/cryptoKeys" + % client.transport._host, + args[1], + ) + + +def test_list_crypto_keys_rest_flattened_error(transport: str = "rest"): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_crypto_keys( + service.ListCryptoKeysRequest(), + parent="parent_value", + ) + + +def test_list_crypto_keys_rest_pager(transport: str = "rest"): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListCryptoKeysResponse( + crypto_keys=[ + resources.CryptoKey(), + resources.CryptoKey(), + resources.CryptoKey(), + ], + next_page_token="abc", + ), + service.ListCryptoKeysResponse( + crypto_keys=[], + next_page_token="def", + ), + service.ListCryptoKeysResponse( + crypto_keys=[ + resources.CryptoKey(), + ], + next_page_token="ghi", + ), + service.ListCryptoKeysResponse( + crypto_keys=[ + resources.CryptoKey(), + resources.CryptoKey(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListCryptoKeysResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/keyRings/sample3" + } + + pager = client.list_crypto_keys(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.CryptoKey) for i in results) + + pages = list(client.list_crypto_keys(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_list_crypto_key_versions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has 
been cached + assert ( + client._transport.list_crypto_key_versions + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_crypto_key_versions + ] = mock_rpc + + request = {} + client.list_crypto_key_versions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_crypto_key_versions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_crypto_key_versions_rest_required_fields( + request_type=service.ListCryptoKeyVersionsRequest, +): + transport_class = transports.KeyManagementServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_crypto_key_versions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_crypto_key_versions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + "view", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListCryptoKeyVersionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListCryptoKeyVersionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_crypto_key_versions(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_crypto_key_versions_rest_unset_required_fields(): + transport = transports.KeyManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_crypto_key_versions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + "view", + ) + ) + & set(("parent",)) + ) + + +def test_list_crypto_key_versions_rest_flattened(): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = service.ListCryptoKeyVersionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/keyRings/sample3/cryptoKeys/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListCryptoKeyVersionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_crypto_key_versions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/keyRings/*/cryptoKeys/*}/cryptoKeyVersions" + % client.transport._host, + args[1], + ) + + +def test_list_crypto_key_versions_rest_flattened_error(transport: str = "rest"): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_crypto_key_versions( + service.ListCryptoKeyVersionsRequest(), + parent="parent_value", + ) + + +def test_list_crypto_key_versions_rest_pager(transport: str = "rest"): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListCryptoKeyVersionsResponse( + crypto_key_versions=[ + resources.CryptoKeyVersion(), + resources.CryptoKeyVersion(), + resources.CryptoKeyVersion(), + ], + next_page_token="abc", + ), + service.ListCryptoKeyVersionsResponse( + crypto_key_versions=[], + next_page_token="def", + ), + service.ListCryptoKeyVersionsResponse( + crypto_key_versions=[ + resources.CryptoKeyVersion(), + ], + next_page_token="ghi", + ), + service.ListCryptoKeyVersionsResponse( + crypto_key_versions=[ + resources.CryptoKeyVersion(), + resources.CryptoKeyVersion(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + service.ListCryptoKeyVersionsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/keyRings/sample3/cryptoKeys/sample4" + } + + pager = client.list_crypto_key_versions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.CryptoKeyVersion) for i in results) + + pages = list(client.list_crypto_key_versions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_list_import_jobs_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with 
mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_import_jobs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_import_jobs + ] = mock_rpc + + request = {} + client.list_import_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_import_jobs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_import_jobs_rest_required_fields( + request_type=service.ListImportJobsRequest, +): + transport_class = transports.KeyManagementServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_import_jobs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_import_jobs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not 
mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListImportJobsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListImportJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_import_jobs(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_import_jobs_rest_unset_required_fields(): + transport = transports.KeyManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_import_jobs._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_import_jobs_rest_flattened(): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = service.ListImportJobsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/keyRings/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListImportJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_import_jobs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/keyRings/*}/importJobs" + % client.transport._host, + args[1], + ) + + +def test_list_import_jobs_rest_flattened_error(transport: str = "rest"): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_import_jobs( + service.ListImportJobsRequest(), + parent="parent_value", + ) + + +def test_list_import_jobs_rest_pager(transport: str = "rest"): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListImportJobsResponse( + import_jobs=[ + resources.ImportJob(), + resources.ImportJob(), + resources.ImportJob(), + ], + next_page_token="abc", + ), + service.ListImportJobsResponse( + import_jobs=[], + next_page_token="def", + ), + service.ListImportJobsResponse( + import_jobs=[ + resources.ImportJob(), + ], + next_page_token="ghi", + ), + service.ListImportJobsResponse( + import_jobs=[ + resources.ImportJob(), + resources.ImportJob(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListImportJobsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/keyRings/sample3" + } + pager = client.list_import_jobs(request=sample_request) -def test_list_key_rings_rest_use_cached_wrapped_rpc(): + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.ImportJob) for i in results) + + pages = list(client.list_import_jobs(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_list_retired_resources_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12199,29 +14853,36 @@ def test_list_key_rings_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_key_rings in 
client._transport._wrapped_methods + assert ( + client._transport.list_retired_resources + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_key_rings] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_retired_resources + ] = mock_rpc request = {} - client.list_key_rings(request) + client.list_retired_resources(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_key_rings(request) + client.list_retired_resources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_key_rings_rest_required_fields(request_type=service.ListKeyRingsRequest): +def test_list_retired_resources_rest_required_fields( + request_type=service.ListRetiredResourcesRequest, +): transport_class = transports.KeyManagementServiceRestTransport request_init = {} @@ -12236,7 +14897,7 @@ def test_list_key_rings_rest_required_fields(request_type=service.ListKeyRingsRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_key_rings._get_unset_required_fields(jsonified_request) + ).list_retired_resources._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -12245,12 +14906,10 @@ def test_list_key_rings_rest_required_fields(request_type=service.ListKeyRingsRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_key_rings._get_unset_required_fields(jsonified_request) + ).list_retired_resources._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "filter", - "order_by", "page_size", "page_token", ) @@ -12268,7 +14927,7 @@ def test_list_key_rings_rest_required_fields(request_type=service.ListKeyRingsRe request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = service.ListKeyRingsResponse() + return_value = service.ListRetiredResourcesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12289,31 +14948,29 @@ def test_list_key_rings_rest_required_fields(request_type=service.ListKeyRingsRe response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListKeyRingsResponse.pb(return_value) + return_value = service.ListRetiredResourcesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_key_rings(request) + response = client.list_retired_resources(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_key_rings_rest_unset_required_fields(): +def test_list_retired_resources_rest_unset_required_fields(): transport = transports.KeyManagementServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_key_rings._get_unset_required_fields({}) + unset_fields = transport.list_retired_resources._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", - "orderBy", "pageSize", "pageToken", ) @@ -12322,7 +14979,7 @@ def test_list_key_rings_rest_unset_required_fields(): ) -def test_list_key_rings_rest_flattened(): +def 
test_list_retired_resources_rest_flattened(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12331,7 +14988,7 @@ def test_list_key_rings_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = service.ListKeyRingsResponse() + return_value = service.ListRetiredResourcesResponse() # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -12346,25 +15003,26 @@ def test_list_key_rings_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListKeyRingsResponse.pb(return_value) + return_value = service.ListRetiredResourcesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_key_rings(**mock_args) + client.list_retired_resources(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/keyRings" % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/retiredResources" + % client.transport._host, args[1], ) -def test_list_key_rings_rest_flattened_error(transport: str = "rest"): +def test_list_retired_resources_rest_flattened_error(transport: str = "rest"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12373,13 +15031,13 @@ def test_list_key_rings_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_key_rings( - service.ListKeyRingsRequest(), + client.list_retired_resources( + service.ListRetiredResourcesRequest(), parent="parent_value", ) -def test_list_key_rings_rest_pager(transport: str = "rest"): +def test_list_retired_resources_rest_pager(transport: str = "rest"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12391,28 +15049,28 @@ def test_list_key_rings_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - service.ListKeyRingsResponse( - key_rings=[ - resources.KeyRing(), - resources.KeyRing(), - resources.KeyRing(), + service.ListRetiredResourcesResponse( + retired_resources=[ + resources.RetiredResource(), + resources.RetiredResource(), + resources.RetiredResource(), ], next_page_token="abc", ), - service.ListKeyRingsResponse( - key_rings=[], + service.ListRetiredResourcesResponse( + retired_resources=[], next_page_token="def", ), - service.ListKeyRingsResponse( - key_rings=[ - resources.KeyRing(), + service.ListRetiredResourcesResponse( + retired_resources=[ + resources.RetiredResource(), ], next_page_token="ghi", ), - 
service.ListKeyRingsResponse( - key_rings=[ - resources.KeyRing(), - resources.KeyRing(), + service.ListRetiredResourcesResponse( + retired_resources=[ + resources.RetiredResource(), + resources.RetiredResource(), ], ), ) @@ -12420,7 +15078,9 @@ def test_list_key_rings_rest_pager(transport: str = "rest"): response = response + response # Wrap the values into proper Response objs - response = tuple(service.ListKeyRingsResponse.to_json(x) for x in response) + response = tuple( + service.ListRetiredResourcesResponse.to_json(x) for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): return_val._content = response_val.encode("UTF-8") @@ -12429,18 +15089,18 @@ def test_list_key_rings_rest_pager(transport: str = "rest"): sample_request = {"parent": "projects/sample1/locations/sample2"} - pager = client.list_key_rings(request=sample_request) + pager = client.list_retired_resources(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, resources.KeyRing) for i in results) + assert all(isinstance(i, resources.RetiredResource) for i in results) - pages = list(client.list_key_rings(request=sample_request).pages) + pages = list(client.list_retired_resources(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -def test_list_crypto_keys_rest_use_cached_wrapped_rpc(): +def test_get_key_ring_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12454,37 +15114,33 @@ def test_list_crypto_keys_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_crypto_keys in client._transport._wrapped_methods + assert client._transport.get_key_ring in 
client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_crypto_keys - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_key_ring] = mock_rpc request = {} - client.list_crypto_keys(request) + client.get_key_ring(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_crypto_keys(request) + client.get_key_ring(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_crypto_keys_rest_required_fields( - request_type=service.ListCryptoKeysRequest, -): +def test_get_key_ring_rest_required_fields(request_type=service.GetKeyRingRequest): transport_class = transports.KeyManagementServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -12495,31 +15151,21 @@ def test_list_crypto_keys_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_crypto_keys._get_unset_required_fields(jsonified_request) + ).get_key_ring._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_crypto_keys._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - "version_view", - ) - ) + ).get_key_ring._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12528,7 +15174,7 @@ def test_list_crypto_keys_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = service.ListCryptoKeysResponse() + return_value = resources.KeyRing() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12549,41 +15195,30 @@ def test_list_crypto_keys_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListCryptoKeysResponse.pb(return_value) + return_value = resources.KeyRing.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_crypto_keys(request) + response = client.get_key_ring(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_crypto_keys_rest_unset_required_fields(): +def test_get_key_ring_rest_unset_required_fields(): transport = transports.KeyManagementServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = 
transport.list_crypto_keys._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - "versionView", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_key_ring._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_list_crypto_keys_rest_flattened(): +def test_get_key_ring_rest_flattened(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12592,16 +15227,14 @@ def test_list_crypto_keys_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = service.ListCryptoKeysResponse() + return_value = resources.KeyRing() # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/keyRings/sample3" - } + sample_request = {"name": "projects/sample1/locations/sample2/keyRings/sample3"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) @@ -12609,26 +15242,25 @@ def test_list_crypto_keys_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListCryptoKeysResponse.pb(return_value) + return_value = resources.KeyRing.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_crypto_keys(**mock_args) + client.get_key_ring(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/keyRings/*}/cryptoKeys" - % client.transport._host, + "%s/v1/{name=projects/*/locations/*/keyRings/*}" % client.transport._host, args[1], ) -def test_list_crypto_keys_rest_flattened_error(transport: str = "rest"): +def test_get_key_ring_rest_flattened_error(transport: str = "rest"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12637,76 +15269,13 @@ def test_list_crypto_keys_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_crypto_keys( - service.ListCryptoKeysRequest(), - parent="parent_value", - ) - - -def test_list_crypto_keys_rest_pager(transport: str = "rest"): - client = KeyManagementServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - service.ListCryptoKeysResponse( - crypto_keys=[ - resources.CryptoKey(), - resources.CryptoKey(), - resources.CryptoKey(), - ], - next_page_token="abc", - ), - service.ListCryptoKeysResponse( - crypto_keys=[], - next_page_token="def", - ), - service.ListCryptoKeysResponse( - crypto_keys=[ - resources.CryptoKey(), - ], - next_page_token="ghi", - ), - service.ListCryptoKeysResponse( - crypto_keys=[ - resources.CryptoKey(), - resources.CryptoKey(), - ], - ), + client.get_key_ring( + service.GetKeyRingRequest(), + name="name_value", ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(service.ListCryptoKeysResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/keyRings/sample3" - } - - pager = client.list_crypto_keys(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.CryptoKey) for i in results) - - pages = list(client.list_crypto_keys(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token -def test_list_crypto_key_versions_rest_use_cached_wrapped_rpc(): +def test_get_crypto_key_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12720,40 +15289,33 @@ def test_list_crypto_key_versions_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method 
has been cached - assert ( - client._transport.list_crypto_key_versions - in client._transport._wrapped_methods - ) + assert client._transport.get_crypto_key in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_crypto_key_versions - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_crypto_key] = mock_rpc request = {} - client.list_crypto_key_versions(request) + client.get_crypto_key(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_crypto_key_versions(request) + client.get_crypto_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_crypto_key_versions_rest_required_fields( - request_type=service.ListCryptoKeyVersionsRequest, -): +def test_get_crypto_key_rest_required_fields(request_type=service.GetCryptoKeyRequest): transport_class = transports.KeyManagementServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -12764,31 +15326,21 @@ def test_list_crypto_key_versions_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_crypto_key_versions._get_unset_required_fields(jsonified_request) + ).get_crypto_key._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - 
).list_crypto_key_versions._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - "view", - ) - ) + ).get_crypto_key._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12797,7 +15349,7 @@ def test_list_crypto_key_versions_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = service.ListCryptoKeyVersionsResponse() + return_value = resources.CryptoKey() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12818,41 +15370,30 @@ def test_list_crypto_key_versions_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListCryptoKeyVersionsResponse.pb(return_value) + return_value = resources.CryptoKey.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_crypto_key_versions(request) + response = client.get_crypto_key(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_crypto_key_versions_rest_unset_required_fields(): +def test_get_crypto_key_rest_unset_required_fields(): transport = transports.KeyManagementServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_crypto_key_versions._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - "view", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_crypto_key._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_list_crypto_key_versions_rest_flattened(): +def test_get_crypto_key_rest_flattened(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12861,16 +15402,16 @@ def test_list_crypto_key_versions_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = service.ListCryptoKeyVersionsResponse() + return_value = resources.CryptoKey() # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/keyRings/sample3/cryptoKeys/sample4" + "name": "projects/sample1/locations/sample2/keyRings/sample3/cryptoKeys/sample4" } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) @@ -12878,26 +15419,26 @@ def test_list_crypto_key_versions_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListCryptoKeyVersionsResponse.pb(return_value) + return_value = resources.CryptoKey.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_crypto_key_versions(**mock_args) + client.get_crypto_key(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/keyRings/*/cryptoKeys/*}/cryptoKeyVersions" + "%s/v1/{name=projects/*/locations/*/keyRings/*/cryptoKeys/*}" % client.transport._host, args[1], ) -def test_list_crypto_key_versions_rest_flattened_error(transport: str = "rest"): +def test_get_crypto_key_rest_flattened_error(transport: str = "rest"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12906,78 +15447,13 @@ def test_list_crypto_key_versions_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_crypto_key_versions( - service.ListCryptoKeyVersionsRequest(), - parent="parent_value", - ) - - -def test_list_crypto_key_versions_rest_pager(transport: str = "rest"): - client = KeyManagementServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - service.ListCryptoKeyVersionsResponse( - crypto_key_versions=[ - resources.CryptoKeyVersion(), - resources.CryptoKeyVersion(), - resources.CryptoKeyVersion(), - ], - next_page_token="abc", - ), - service.ListCryptoKeyVersionsResponse( - crypto_key_versions=[], - next_page_token="def", - ), - service.ListCryptoKeyVersionsResponse( - crypto_key_versions=[ - resources.CryptoKeyVersion(), - ], - next_page_token="ghi", - ), - service.ListCryptoKeyVersionsResponse( - crypto_key_versions=[ - resources.CryptoKeyVersion(), - resources.CryptoKeyVersion(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - service.ListCryptoKeyVersionsResponse.to_json(x) for x in response + client.get_crypto_key( + service.GetCryptoKeyRequest(), + name="name_value", ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/keyRings/sample3/cryptoKeys/sample4" - } - - pager = client.list_crypto_key_versions(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert 
all(isinstance(i, resources.CryptoKeyVersion) for i in results) - - pages = list(client.list_crypto_key_versions(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token -def test_list_import_jobs_rest_use_cached_wrapped_rpc(): +def test_get_crypto_key_version_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12991,7 +15467,10 @@ def test_list_import_jobs_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_import_jobs in client._transport._wrapped_methods + assert ( + client._transport.get_crypto_key_version + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -12999,29 +15478,29 @@ def test_list_import_jobs_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_import_jobs + client._transport.get_crypto_key_version ] = mock_rpc request = {} - client.list_import_jobs(request) + client.get_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_import_jobs(request) + client.get_crypto_key_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_import_jobs_rest_required_fields( - request_type=service.ListImportJobsRequest, +def test_get_crypto_key_version_rest_required_fields( + request_type=service.GetCryptoKeyVersionRequest, ): transport_class = transports.KeyManagementServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13032,30 +15511,21 @@ def test_list_import_jobs_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_import_jobs._get_unset_required_fields(jsonified_request) + ).get_crypto_key_version._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_import_jobs._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + ).get_crypto_key_version._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13064,7 +15534,7 @@ def test_list_import_jobs_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = service.ListImportJobsResponse() + return_value = resources.CryptoKeyVersion() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -13085,40 +15555,30 @@ def test_list_import_jobs_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListImportJobsResponse.pb(return_value) + return_value = resources.CryptoKeyVersion.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_import_jobs(request) + response = client.get_crypto_key_version(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_import_jobs_rest_unset_required_fields(): +def test_get_crypto_key_version_rest_unset_required_fields(): transport = transports.KeyManagementServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = 
transport.list_import_jobs._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_crypto_key_version._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_list_import_jobs_rest_flattened(): +def test_get_crypto_key_version_rest_flattened(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13127,16 +15587,16 @@ def test_list_import_jobs_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = service.ListImportJobsResponse() + return_value = resources.CryptoKeyVersion() # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/keyRings/sample3" + "name": "projects/sample1/locations/sample2/keyRings/sample3/cryptoKeys/sample4/cryptoKeyVersions/sample5" } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) @@ -13144,104 +15604,41 @@ def test_list_import_jobs_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListImportJobsResponse.pb(return_value) + return_value = resources.CryptoKeyVersion.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_import_jobs(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/keyRings/*}/importJobs" - % client.transport._host, - args[1], - ) - - -def test_list_import_jobs_rest_flattened_error(transport: str = "rest"): - client = KeyManagementServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_import_jobs( - service.ListImportJobsRequest(), - parent="parent_value", - ) - - -def test_list_import_jobs_rest_pager(transport: str = "rest"): - client = KeyManagementServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - service.ListImportJobsResponse( - import_jobs=[ - resources.ImportJob(), - resources.ImportJob(), - resources.ImportJob(), - ], - next_page_token="abc", - ), - service.ListImportJobsResponse( - import_jobs=[], - next_page_token="def", - ), - service.ListImportJobsResponse( - import_jobs=[ - resources.ImportJob(), - ], - next_page_token="ghi", - ), - service.ListImportJobsResponse( - import_jobs=[ - resources.ImportJob(), - resources.ImportJob(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(service.ListImportJobsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/keyRings/sample3" - } + client.get_crypto_key_version(**mock_args) - pager = client.list_import_jobs(request=sample_request) + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/keyRings/*/cryptoKeys/*/cryptoKeyVersions/*}" + % client.transport._host, + args[1], + ) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.ImportJob) for i in results) - pages = list(client.list_import_jobs(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +def test_get_crypto_key_version_rest_flattened_error(transport: str = "rest"): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_crypto_key_version( + service.GetCryptoKeyVersionRequest(), + name="name_value", + ) -def test_get_key_ring_rest_use_cached_wrapped_rpc(): +def test_get_public_key_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13255,29 +15652,29 @@ def test_get_key_ring_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_key_ring in client._transport._wrapped_methods + assert client._transport.get_public_key in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.get_key_ring] = mock_rpc + client._transport._wrapped_methods[client._transport.get_public_key] = mock_rpc request = {} - client.get_key_ring(request) + client.get_public_key(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_key_ring(request) + client.get_public_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_key_ring_rest_required_fields(request_type=service.GetKeyRingRequest): +def test_get_public_key_rest_required_fields(request_type=service.GetPublicKeyRequest): transport_class = transports.KeyManagementServiceRestTransport request_init = {} @@ -13292,7 +15689,7 @@ def test_get_key_ring_rest_required_fields(request_type=service.GetKeyRingReques unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_key_ring._get_unset_required_fields(jsonified_request) + ).get_public_key._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -13301,7 +15698,9 @@ def test_get_key_ring_rest_required_fields(request_type=service.GetKeyRingReques unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_key_ring._get_unset_required_fields(jsonified_request) + ).get_public_key._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("public_key_format",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -13315,7 +15714,7 @@ def test_get_key_ring_rest_required_fields(request_type=service.GetKeyRingReques request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = resources.KeyRing() + return_value = resources.PublicKey() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -13336,30 +15735,30 @@ def test_get_key_ring_rest_required_fields(request_type=service.GetKeyRingReques response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.KeyRing.pb(return_value) + return_value = resources.PublicKey.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_key_ring(request) + response = client.get_public_key(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_key_ring_rest_unset_required_fields(): +def test_get_public_key_rest_unset_required_fields(): transport = transports.KeyManagementServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_key_ring._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.get_public_key._get_unset_required_fields({}) + assert set(unset_fields) == (set(("publicKeyFormat",)) & set(("name",))) -def test_get_key_ring_rest_flattened(): +def test_get_public_key_rest_flattened(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13368,10 +15767,12 @@ def test_get_key_ring_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.KeyRing() + return_value = resources.PublicKey() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/keyRings/sample3"} + sample_request = { + "name": "projects/sample1/locations/sample2/keyRings/sample3/cryptoKeys/sample4/cryptoKeyVersions/sample5" + } # get truthy value for each flattened field mock_args = dict( @@ -13383,25 +15784,26 @@ def test_get_key_ring_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.KeyRing.pb(return_value) + return_value = resources.PublicKey.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_key_ring(**mock_args) + client.get_public_key(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/keyRings/*}" % client.transport._host, + "%s/v1/{name=projects/*/locations/*/keyRings/*/cryptoKeys/*/cryptoKeyVersions/*}/publicKey" + % client.transport._host, args[1], ) -def test_get_key_ring_rest_flattened_error(transport: str = "rest"): +def test_get_public_key_rest_flattened_error(transport: str = "rest"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13410,13 +15812,13 @@ def test_get_key_ring_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_key_ring( - service.GetKeyRingRequest(), + client.get_public_key( + service.GetPublicKeyRequest(), name="name_value", ) -def test_get_crypto_key_rest_use_cached_wrapped_rpc(): +def test_get_import_job_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13430,29 +15832,29 @@ def test_get_crypto_key_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_crypto_key in client._transport._wrapped_methods + assert client._transport.get_import_job in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_crypto_key] = mock_rpc + client._transport._wrapped_methods[client._transport.get_import_job] = mock_rpc request = {} - client.get_crypto_key(request) + client.get_import_job(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_crypto_key(request) + client.get_import_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_crypto_key_rest_required_fields(request_type=service.GetCryptoKeyRequest): +def test_get_import_job_rest_required_fields(request_type=service.GetImportJobRequest): transport_class = transports.KeyManagementServiceRestTransport request_init = {} @@ -13467,7 +15869,7 @@ def test_get_crypto_key_rest_required_fields(request_type=service.GetCryptoKeyRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_crypto_key._get_unset_required_fields(jsonified_request) + ).get_import_job._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -13476,7 +15878,7 @@ def test_get_crypto_key_rest_required_fields(request_type=service.GetCryptoKeyRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_crypto_key._get_unset_required_fields(jsonified_request) + ).get_import_job._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -13490,7 +15892,7 @@ def test_get_crypto_key_rest_required_fields(request_type=service.GetCryptoKeyRe request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.CryptoKey() + return_value = resources.ImportJob() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -13511,30 +15913,30 @@ def test_get_crypto_key_rest_required_fields(request_type=service.GetCryptoKeyRe response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.CryptoKey.pb(return_value) + return_value = resources.ImportJob.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_crypto_key(request) + response = client.get_import_job(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_crypto_key_rest_unset_required_fields(): +def test_get_import_job_rest_unset_required_fields(): transport = transports.KeyManagementServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_crypto_key._get_unset_required_fields({}) + unset_fields = transport.get_import_job._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_get_crypto_key_rest_flattened(): +def test_get_import_job_rest_flattened(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13543,11 +15945,11 @@ def test_get_crypto_key_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.CryptoKey() + return_value = resources.ImportJob() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/keyRings/sample3/cryptoKeys/sample4" + "name": "projects/sample1/locations/sample2/keyRings/sample3/importJobs/sample4" } # get truthy value for each flattened field @@ -13560,26 +15962,26 @@ def test_get_crypto_key_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.CryptoKey.pb(return_value) + return_value = resources.ImportJob.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_crypto_key(**mock_args) + client.get_import_job(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/keyRings/*/cryptoKeys/*}" + "%s/v1/{name=projects/*/locations/*/keyRings/*/importJobs/*}" % client.transport._host, args[1], ) -def test_get_crypto_key_rest_flattened_error(transport: str = "rest"): +def test_get_import_job_rest_flattened_error(transport: str = "rest"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13588,13 +15990,13 @@ def test_get_crypto_key_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_crypto_key( - service.GetCryptoKeyRequest(), + client.get_import_job( + service.GetImportJobRequest(), name="name_value", ) -def test_get_crypto_key_version_rest_use_cached_wrapped_rpc(): +def test_get_retired_resource_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13609,8 +16011,7 @@ def test_get_crypto_key_version_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_crypto_key_version - in client._transport._wrapped_methods + client._transport.get_retired_resource in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -13619,24 +16020,24 @@ def test_get_crypto_key_version_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_crypto_key_version + client._transport.get_retired_resource ] = mock_rpc request = {} - client.get_crypto_key_version(request) + client.get_retired_resource(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_crypto_key_version(request) + client.get_retired_resource(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_crypto_key_version_rest_required_fields( - request_type=service.GetCryptoKeyVersionRequest, +def test_get_retired_resource_rest_required_fields( + request_type=service.GetRetiredResourceRequest, ): transport_class = transports.KeyManagementServiceRestTransport @@ -13652,7 +16053,7 @@ def test_get_crypto_key_version_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_crypto_key_version._get_unset_required_fields(jsonified_request) + ).get_retired_resource._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -13661,7 +16062,7 @@ def test_get_crypto_key_version_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_crypto_key_version._get_unset_required_fields(jsonified_request) + ).get_retired_resource._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -13675,7 +16076,7 @@ def test_get_crypto_key_version_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.CryptoKeyVersion() + return_value = resources.RetiredResource() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -13696,30 +16097,30 @@ def test_get_crypto_key_version_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.CryptoKeyVersion.pb(return_value) + return_value = resources.RetiredResource.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_crypto_key_version(request) + response = client.get_retired_resource(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_crypto_key_version_rest_unset_required_fields(): +def test_get_retired_resource_rest_unset_required_fields(): transport = transports.KeyManagementServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_crypto_key_version._get_unset_required_fields({}) + unset_fields = transport.get_retired_resource._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_get_crypto_key_version_rest_flattened(): +def test_get_retired_resource_rest_flattened(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13728,11 +16129,11 @@ def test_get_crypto_key_version_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.CryptoKeyVersion() + return_value = resources.RetiredResource() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/keyRings/sample3/cryptoKeys/sample4/cryptoKeyVersions/sample5" + "name": "projects/sample1/locations/sample2/retiredResources/sample3" } # get truthy value for each flattened field @@ -13745,26 +16146,26 @@ def test_get_crypto_key_version_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.CryptoKeyVersion.pb(return_value) + return_value = resources.RetiredResource.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_crypto_key_version(**mock_args) + client.get_retired_resource(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/keyRings/*/cryptoKeys/*/cryptoKeyVersions/*}" + "%s/v1/{name=projects/*/locations/*/retiredResources/*}" % client.transport._host, args[1], ) -def test_get_crypto_key_version_rest_flattened_error(transport: str = "rest"): +def test_get_retired_resource_rest_flattened_error(transport: str = "rest"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13773,13 +16174,13 @@ def test_get_crypto_key_version_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_crypto_key_version( - service.GetCryptoKeyVersionRequest(), + client.get_retired_resource( + service.GetRetiredResourceRequest(), name="name_value", ) -def test_get_public_key_rest_use_cached_wrapped_rpc(): +def test_create_key_ring_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13793,33 +16194,36 @@ def test_get_public_key_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_public_key in client._transport._wrapped_methods + assert client._transport.create_key_ring in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_public_key] = mock_rpc + client._transport._wrapped_methods[client._transport.create_key_ring] = mock_rpc request = {} - client.get_public_key(request) + client.create_key_ring(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_public_key(request) + client.create_key_ring(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_public_key_rest_required_fields(request_type=service.GetPublicKeyRequest): +def test_create_key_ring_rest_required_fields( + request_type=service.CreateKeyRingRequest, +): transport_class = transports.KeyManagementServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" + request_init["key_ring_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13827,26 +16231,32 @@ def test_get_public_key_rest_required_fields(request_type=service.GetPublicKeyRe ) # verify fields with default values are dropped + assert "keyRingId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_public_key._get_unset_required_fields(jsonified_request) + ).create_key_ring._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "keyRingId" in jsonified_request + assert jsonified_request["keyRingId"] == request_init["key_ring_id"] - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" + jsonified_request["keyRingId"] = "key_ring_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_public_key._get_unset_required_fields(jsonified_request) + ).create_key_ring._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("public_key_format",)) + assert not set(unset_fields) - set(("key_ring_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "keyRingId" in jsonified_request + assert jsonified_request["keyRingId"] == "key_ring_id_value" client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13855,7 +16265,7 @@ def test_get_public_key_rest_required_fields(request_type=service.GetPublicKeyRe request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.PublicKey() + return_value = resources.KeyRing() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -13867,39 +16277,55 @@ def test_get_public_key_rest_required_fields(request_type=service.GetPublicKeyRe pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.PublicKey.pb(return_value) + return_value = resources.KeyRing.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_public_key(request) + response = client.create_key_ring(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params 
= [ + ( + "keyRingId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_public_key_rest_unset_required_fields(): +def test_create_key_ring_rest_unset_required_fields(): transport = transports.KeyManagementServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_public_key._get_unset_required_fields({}) - assert set(unset_fields) == (set(("publicKeyFormat",)) & set(("name",))) + unset_fields = transport.create_key_ring._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("keyRingId",)) + & set( + ( + "parent", + "keyRingId", + "keyRing", + ) + ) + ) -def test_get_public_key_rest_flattened(): +def test_create_key_ring_rest_flattened(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13908,16 +16334,16 @@ def test_get_public_key_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.PublicKey() + return_value = resources.KeyRing() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/keyRings/sample3/cryptoKeys/sample4/cryptoKeyVersions/sample5" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + key_ring_id="key_ring_id_value", + key_ring=resources.KeyRing(name="name_value"), ) mock_args.update(sample_request) @@ -13925,26 +16351,25 @@ def test_get_public_key_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.PublicKey.pb(return_value) + return_value = resources.KeyRing.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_public_key(**mock_args) + client.create_key_ring(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/keyRings/*/cryptoKeys/*/cryptoKeyVersions/*}/publicKey" - % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/keyRings" % client.transport._host, args[1], ) -def test_get_public_key_rest_flattened_error(transport: str = "rest"): +def test_create_key_ring_rest_flattened_error(transport: str = "rest"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13953,13 +16378,15 @@ def test_get_public_key_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_public_key( - service.GetPublicKeyRequest(), - name="name_value", + client.create_key_ring( + service.CreateKeyRingRequest(), + parent="parent_value", + key_ring_id="key_ring_id_value", + key_ring=resources.KeyRing(name="name_value"), ) -def test_get_import_job_rest_use_cached_wrapped_rpc(): +def test_create_crypto_key_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13973,33 +16400,38 @@ def test_get_import_job_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_import_job in client._transport._wrapped_methods + assert client._transport.create_crypto_key in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_import_job] = mock_rpc + client._transport._wrapped_methods[ + client._transport.create_crypto_key + ] = mock_rpc request = {} - client.get_import_job(request) + client.create_crypto_key(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_import_job(request) + client.create_crypto_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_import_job_rest_required_fields(request_type=service.GetImportJobRequest): +def test_create_crypto_key_rest_required_fields( + request_type=service.CreateCryptoKeyRequest, +): transport_class = transports.KeyManagementServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" + request_init["crypto_key_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -14007,24 +16439,37 @@ def test_get_import_job_rest_required_fields(request_type=service.GetImportJobRe ) # verify fields with default values are dropped + assert "cryptoKeyId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_import_job._get_unset_required_fields(jsonified_request) + ).create_crypto_key._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "cryptoKeyId" in jsonified_request + assert jsonified_request["cryptoKeyId"] == request_init["crypto_key_id"] - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" + jsonified_request["cryptoKeyId"] = "crypto_key_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_import_job._get_unset_required_fields(jsonified_request) + ).create_crypto_key._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "crypto_key_id", + "skip_initial_version_creation", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "cryptoKeyId" in jsonified_request + assert jsonified_request["cryptoKeyId"] == "crypto_key_id_value" client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14033,7 +16478,7 @@ def test_get_import_job_rest_required_fields(request_type=service.GetImportJobRe request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.ImportJob() + return_value = resources.CryptoKey() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14045,39 +16490,60 @@ def test_get_import_job_rest_required_fields(request_type=service.GetImportJobRe pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.ImportJob.pb(return_value) + return_value = resources.CryptoKey.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_import_job(request) + response = client.create_crypto_key(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params 
= [ + ( + "cryptoKeyId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_import_job_rest_unset_required_fields(): +def test_create_crypto_key_rest_unset_required_fields(): transport = transports.KeyManagementServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_import_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.create_crypto_key._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "cryptoKeyId", + "skipInitialVersionCreation", + ) + ) + & set( + ( + "parent", + "cryptoKeyId", + "cryptoKey", + ) + ) + ) -def test_get_import_job_rest_flattened(): +def test_create_crypto_key_rest_flattened(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14086,16 +16552,18 @@ def test_get_import_job_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.ImportJob() + return_value = resources.CryptoKey() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/keyRings/sample3/importJobs/sample4" + "parent": "projects/sample1/locations/sample2/keyRings/sample3" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + crypto_key_id="crypto_key_id_value", + crypto_key=resources.CryptoKey(name="name_value"), ) mock_args.update(sample_request) @@ -14103,26 +16571,26 @@ def test_get_import_job_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.ImportJob.pb(return_value) + return_value = resources.CryptoKey.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_import_job(**mock_args) + client.create_crypto_key(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/keyRings/*/importJobs/*}" + "%s/v1/{parent=projects/*/locations/*/keyRings/*}/cryptoKeys" % client.transport._host, args[1], ) -def test_get_import_job_rest_flattened_error(transport: str = "rest"): +def test_create_crypto_key_rest_flattened_error(transport: str = "rest"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14131,13 +16599,15 @@ def test_get_import_job_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_import_job( - service.GetImportJobRequest(), - name="name_value", + client.create_crypto_key( + service.CreateCryptoKeyRequest(), + parent="parent_value", + crypto_key_id="crypto_key_id_value", + crypto_key=resources.CryptoKey(name="name_value"), ) -def test_create_key_ring_rest_use_cached_wrapped_rpc(): +def test_create_crypto_key_version_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14151,36 +16621,40 @@ def test_create_key_ring_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_key_ring in client._transport._wrapped_methods + assert ( + client._transport.create_crypto_key_version + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_key_ring] = mock_rpc + client._transport._wrapped_methods[ + client._transport.create_crypto_key_version + ] = mock_rpc request = {} - client.create_key_ring(request) + client.create_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_key_ring(request) + client.create_crypto_key_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_key_ring_rest_required_fields( - request_type=service.CreateKeyRingRequest, +def test_create_crypto_key_version_rest_required_fields( + request_type=service.CreateCryptoKeyVersionRequest, ): transport_class = transports.KeyManagementServiceRestTransport request_init = {} request_init["parent"] = "" - request_init["key_ring_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -14188,32 +16662,24 @@ def test_create_key_ring_rest_required_fields( ) # verify fields with default values are dropped - assert "keyRingId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_key_ring._get_unset_required_fields(jsonified_request) + ).create_crypto_key_version._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "keyRingId" in jsonified_request - assert jsonified_request["keyRingId"] == request_init["key_ring_id"] jsonified_request["parent"] = "parent_value" - jsonified_request["keyRingId"] = "key_ring_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_key_ring._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("key_ring_id",)) + ).create_crypto_key_version._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "keyRingId" in jsonified_request - assert jsonified_request["keyRingId"] == "key_ring_id_value" client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14222,7 +16688,7 @@ def test_create_key_ring_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.KeyRing() + return_value = resources.CryptoKeyVersion() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14244,45 +16710,38 @@ def test_create_key_ring_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.KeyRing.pb(return_value) + return_value = resources.CryptoKeyVersion.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_key_ring(request) + response = client.create_crypto_key_version(request) - expected_params = [ - ( - "keyRingId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_key_ring_rest_unset_required_fields(): +def test_create_crypto_key_version_rest_unset_required_fields(): transport = transports.KeyManagementServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_key_ring._get_unset_required_fields({}) + unset_fields = transport.create_crypto_key_version._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("keyRingId",)) + set(()) & set( ( "parent", - "keyRingId", - "keyRing", + "cryptoKeyVersion", ) ) ) -def test_create_key_ring_rest_flattened(): +def test_create_crypto_key_version_rest_flattened(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14291,16 +16750,17 @@ def test_create_key_ring_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.KeyRing() + return_value = resources.CryptoKeyVersion() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/keyRings/sample3/cryptoKeys/sample4" + } # get truthy value for each flattened field mock_args = dict( parent="parent_value", - key_ring_id="key_ring_id_value", - key_ring=resources.KeyRing(name="name_value"), + crypto_key_version=resources.CryptoKeyVersion(name="name_value"), ) mock_args.update(sample_request) @@ -14308,25 +16768,26 @@ def test_create_key_ring_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.KeyRing.pb(return_value) + return_value = resources.CryptoKeyVersion.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_key_ring(**mock_args) + 
client.create_crypto_key_version(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/keyRings" % client.transport._host, + "%s/v1/{parent=projects/*/locations/*/keyRings/*/cryptoKeys/*}/cryptoKeyVersions" + % client.transport._host, args[1], ) -def test_create_key_ring_rest_flattened_error(transport: str = "rest"): +def test_create_crypto_key_version_rest_flattened_error(transport: str = "rest"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14335,15 +16796,14 @@ def test_create_key_ring_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_key_ring( - service.CreateKeyRingRequest(), + client.create_crypto_key_version( + service.CreateCryptoKeyVersionRequest(), parent="parent_value", - key_ring_id="key_ring_id_value", - key_ring=resources.KeyRing(name="name_value"), + crypto_key_version=resources.CryptoKeyVersion(name="name_value"), ) -def test_create_crypto_key_rest_use_cached_wrapped_rpc(): +def test_delete_crypto_key_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14357,7 +16817,7 @@ def test_create_crypto_key_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_crypto_key in client._transport._wrapped_methods + assert client._transport.delete_crypto_key in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -14365,30 +16825,33 @@ def 
test_create_crypto_key_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_crypto_key + client._transport.delete_crypto_key ] = mock_rpc request = {} - client.create_crypto_key(request) + client.delete_crypto_key(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_crypto_key(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_crypto_key(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_crypto_key_rest_required_fields( - request_type=service.CreateCryptoKeyRequest, +def test_delete_crypto_key_rest_required_fields( + request_type=service.DeleteCryptoKeyRequest, ): transport_class = transports.KeyManagementServiceRestTransport request_init = {} - request_init["parent"] = "" - request_init["crypto_key_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -14396,37 +16859,24 @@ def test_create_crypto_key_rest_required_fields( ) # verify fields with default values are dropped - assert "cryptoKeyId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_crypto_key._get_unset_required_fields(jsonified_request) + ).delete_crypto_key._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "cryptoKeyId" in jsonified_request - assert jsonified_request["cryptoKeyId"] == request_init["crypto_key_id"] - jsonified_request["parent"] = "parent_value" - jsonified_request["cryptoKeyId"] = "crypto_key_id_value" + jsonified_request["name"] 
= "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_crypto_key._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "crypto_key_id", - "skip_initial_version_creation", - ) - ) + ).delete_crypto_key._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "cryptoKeyId" in jsonified_request - assert jsonified_request["cryptoKeyId"] == "crypto_key_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14435,7 +16885,7 @@ def test_create_crypto_key_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.CryptoKey() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14447,60 +16897,36 @@ def test_create_crypto_key_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.CryptoKey.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_crypto_key(request) - - expected_params = [ - ( - "cryptoKeyId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_crypto_key_rest_unset_required_fields(): - transport = transports.KeyManagementServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.create_crypto_key._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "cryptoKeyId", - "skipInitialVersionCreation", - ) - ) - & set( - ( - "parent", - "cryptoKeyId", - "cryptoKey", - ) - ) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_crypto_key(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_crypto_key_rest_unset_required_fields(): + transport = transports.KeyManagementServiceRestTransport( + 
credentials=ga_credentials.AnonymousCredentials ) + unset_fields = transport.delete_crypto_key._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + -def test_create_crypto_key_rest_flattened(): +def test_delete_crypto_key_rest_flattened(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14509,45 +16935,41 @@ def test_create_crypto_key_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.CryptoKey() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/keyRings/sample3" + "name": "projects/sample1/locations/sample2/keyRings/sample3/cryptoKeys/sample4" } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - crypto_key_id="crypto_key_id_value", - crypto_key=resources.CryptoKey(name="name_value"), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.CryptoKey.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_crypto_key(**mock_args) + client.delete_crypto_key(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/keyRings/*}/cryptoKeys" + "%s/v1/{name=projects/*/locations/*/keyRings/*/cryptoKeys/*}" % client.transport._host, args[1], ) -def test_create_crypto_key_rest_flattened_error(transport: str = "rest"): +def test_delete_crypto_key_rest_flattened_error(transport: str = "rest"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14556,15 +16978,13 @@ def test_create_crypto_key_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_crypto_key( - service.CreateCryptoKeyRequest(), - parent="parent_value", - crypto_key_id="crypto_key_id_value", - crypto_key=resources.CryptoKey(name="name_value"), + client.delete_crypto_key( + service.DeleteCryptoKeyRequest(), + name="name_value", ) -def test_create_crypto_key_version_rest_use_cached_wrapped_rpc(): +def test_delete_crypto_key_version_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14579,7 +16999,7 @@ def test_create_crypto_key_version_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_crypto_key_version + client._transport.delete_crypto_key_version in client._transport._wrapped_methods ) @@ -14589,29 +17009,33 @@ def test_create_crypto_key_version_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.create_crypto_key_version + client._transport.delete_crypto_key_version ] = mock_rpc request = {} - client.create_crypto_key_version(request) + client.delete_crypto_key_version(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_crypto_key_version(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_crypto_key_version(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_crypto_key_version_rest_required_fields( - request_type=service.CreateCryptoKeyVersionRequest, +def test_delete_crypto_key_version_rest_required_fields( + request_type=service.DeleteCryptoKeyVersionRequest, ): transport_class = transports.KeyManagementServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -14622,21 +17046,21 @@ def test_create_crypto_key_version_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_crypto_key_version._get_unset_required_fields(jsonified_request) + ).delete_crypto_key_version._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_crypto_key_version._get_unset_required_fields(jsonified_request) + ).delete_crypto_key_version._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values 
are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14645,7 +17069,7 @@ def test_create_crypto_key_version_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.CryptoKeyVersion() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14657,48 +17081,36 @@ def test_create_crypto_key_version_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.CryptoKeyVersion.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_crypto_key_version(request) + response = client.delete_crypto_key_version(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_crypto_key_version_rest_unset_required_fields(): +def test_delete_crypto_key_version_rest_unset_required_fields(): transport = transports.KeyManagementServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = 
transport.create_crypto_key_version._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "cryptoKeyVersion", - ) - ) - ) + unset_fields = transport.delete_crypto_key_version._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_create_crypto_key_version_rest_flattened(): +def test_delete_crypto_key_version_rest_flattened(): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14707,44 +17119,41 @@ def test_create_crypto_key_version_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.CryptoKeyVersion() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/keyRings/sample3/cryptoKeys/sample4" + "name": "projects/sample1/locations/sample2/keyRings/sample3/cryptoKeys/sample4/cryptoKeyVersions/sample5" } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - crypto_key_version=resources.CryptoKeyVersion(name="name_value"), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.CryptoKeyVersion.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_crypto_key_version(**mock_args) + client.delete_crypto_key_version(**mock_args) # Establish that the underlying call was made with the expected # 
request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/keyRings/*/cryptoKeys/*}/cryptoKeyVersions" + "%s/v1/{name=projects/*/locations/*/keyRings/*/cryptoKeys/*/cryptoKeyVersions/*}" % client.transport._host, args[1], ) -def test_create_crypto_key_version_rest_flattened_error(transport: str = "rest"): +def test_delete_crypto_key_version_rest_flattened_error(transport: str = "rest"): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14753,10 +17162,9 @@ def test_create_crypto_key_version_rest_flattened_error(transport: str = "rest") # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_crypto_key_version( - service.CreateCryptoKeyVersionRequest(), - parent="parent_value", - crypto_key_version=resources.CryptoKeyVersion(name="name_value"), + client.delete_crypto_key_version( + service.DeleteCryptoKeyVersionRequest(), + name="name_value", ) @@ -17927,6 +20335,29 @@ def test_list_import_jobs_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_retired_resources_empty_call_grpc(): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_retired_resources), "__call__" + ) as call: + call.return_value = service.ListRetiredResourcesResponse() + client.list_retired_resources(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListRetiredResourcesRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_get_key_ring_empty_call_grpc(): @@ -18034,6 +20465,29 @@ def test_get_import_job_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_retired_resource_empty_call_grpc(): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_retired_resource), "__call__" + ) as call: + call.return_value = resources.RetiredResource() + client.get_retired_resource(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetRetiredResourceRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_create_key_ring_empty_call_grpc(): @@ -18101,6 +20555,52 @@ def test_create_crypto_key_version_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_crypto_key_empty_call_grpc(): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_crypto_key), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_crypto_key(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.DeleteCryptoKeyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_crypto_key_version_empty_call_grpc(): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_crypto_key_version), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_crypto_key_version(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.DeleteCryptoKeyVersionRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_import_crypto_key_version_empty_call_grpc(): @@ -18604,6 +21104,36 @@ async def test_list_import_jobs_empty_call_grpc_asyncio(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_retired_resources_empty_call_grpc_asyncio(): + client = KeyManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_retired_resources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListRetiredResourcesResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + ) + await client.list_retired_resources(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListRetiredResourcesRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio @@ -18760,6 +21290,37 @@ async def test_get_import_job_empty_call_grpc_asyncio(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_retired_resource_empty_call_grpc_asyncio(): + client = KeyManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_retired_resource), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.RetiredResource( + name="name_value", + original_resource="original_resource_value", + resource_type="resource_type_value", + ) + ) + await client.get_retired_resource(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetRetiredResourceRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@pytest.mark.asyncio @@ -18846,12 +21407,66 @@ async def test_create_crypto_key_version_empty_call_grpc_asyncio(): reimport_eligible=True, ) ) - await client.create_crypto_key_version(request=None) + await client.create_crypto_key_version(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateCryptoKeyVersionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_crypto_key_empty_call_grpc_asyncio(): + client = KeyManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_crypto_key), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_crypto_key(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.DeleteCryptoKeyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_crypto_key_version_empty_call_grpc_asyncio(): + client = KeyManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_crypto_key_version), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_crypto_key_version(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = service.CreateCryptoKeyVersionRequest() + request_msg = service.DeleteCryptoKeyVersionRequest() assert args[0] == request_msg @@ -19940,6 +22555,141 @@ def test_list_import_jobs_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() +def test_list_retired_resources_rest_bad_request( + request_type=service.ListRetiredResourcesRequest, +): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_retired_resources(request) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListRetiredResourcesRequest, + dict, + ], +) +def test_list_retired_resources_rest_call_success(request_type): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListRetiredResourcesResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListRetiredResourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_retired_resources(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListRetiredResourcesPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_retired_resources_rest_interceptors(null_interceptor): + transport = transports.KeyManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.KeyManagementServiceRestInterceptor(), + ) + client = KeyManagementServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, "post_list_retired_resources" + ) as post, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, + "post_list_retired_resources_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, "pre_list_retired_resources" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = service.ListRetiredResourcesRequest.pb( + service.ListRetiredResourcesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = service.ListRetiredResourcesResponse.to_json( + service.ListRetiredResourcesResponse() + ) + req.return_value.content = return_value + + request = service.ListRetiredResourcesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListRetiredResourcesResponse() + post_with_metadata.return_value = ( + service.ListRetiredResourcesResponse(), + 
metadata, + ) + + client.list_retired_resources( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + def test_get_key_ring_rest_bad_request(request_type=service.GetKeyRingRequest): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -20428,13 +23178,152 @@ def test_get_public_key_rest_call_success(request_type): response.algorithm == resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION ) - assert response.name == "name_value" + assert response.name == "name_value" + assert response.protection_level == resources.ProtectionLevel.SOFTWARE + assert response.public_key_format == resources.PublicKey.PublicKeyFormat.PEM + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_public_key_rest_interceptors(null_interceptor): + transport = transports.KeyManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.KeyManagementServiceRestInterceptor(), + ) + client = KeyManagementServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, "post_get_public_key" + ) as post, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, + "post_get_public_key_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, "pre_get_public_key" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = service.GetPublicKeyRequest.pb(service.GetPublicKeyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = resources.PublicKey.to_json(resources.PublicKey()) + req.return_value.content = return_value + + request = service.GetPublicKeyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.PublicKey() + post_with_metadata.return_value = resources.PublicKey(), metadata + + client.get_public_key( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_import_job_rest_bad_request(request_type=service.GetImportJobRequest): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/keyRings/sample3/importJobs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_import_job(request) + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetImportJobRequest, + dict, + ], +) +def test_get_import_job_rest_call_success(request_type): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/keyRings/sample3/importJobs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = resources.ImportJob( + name="name_value", + import_method=resources.ImportJob.ImportMethod.RSA_OAEP_3072_SHA1_AES_256, + protection_level=resources.ProtectionLevel.SOFTWARE, + state=resources.ImportJob.ImportJobState.PENDING_GENERATION, + crypto_key_backend="crypto_key_backend_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.ImportJob.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_import_job(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.ImportJob) + assert response.name == "name_value" + assert ( + response.import_method + == resources.ImportJob.ImportMethod.RSA_OAEP_3072_SHA1_AES_256 + ) assert response.protection_level == resources.ProtectionLevel.SOFTWARE - assert response.public_key_format == resources.PublicKey.PublicKeyFormat.PEM + assert response.state == resources.ImportJob.ImportJobState.PENDING_GENERATION + assert response.crypto_key_backend == "crypto_key_backend_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_public_key_rest_interceptors(null_interceptor): +def test_get_import_job_rest_interceptors(null_interceptor): transport = transports.KeyManagementServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20448,17 +23337,17 @@ def test_get_public_key_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.KeyManagementServiceRestInterceptor, "post_get_public_key" + transports.KeyManagementServiceRestInterceptor, "post_get_import_job" ) as post, 
mock.patch.object( transports.KeyManagementServiceRestInterceptor, - "post_get_public_key_with_metadata", + "post_get_import_job_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.KeyManagementServiceRestInterceptor, "pre_get_public_key" + transports.KeyManagementServiceRestInterceptor, "pre_get_import_job" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = service.GetPublicKeyRequest.pb(service.GetPublicKeyRequest()) + pb_message = service.GetImportJobRequest.pb(service.GetImportJobRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -20469,19 +23358,19 @@ def test_get_public_key_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = resources.PublicKey.to_json(resources.PublicKey()) + return_value = resources.ImportJob.to_json(resources.ImportJob()) req.return_value.content = return_value - request = service.GetPublicKeyRequest() + request = service.GetImportJobRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.PublicKey() - post_with_metadata.return_value = resources.PublicKey(), metadata + post.return_value = resources.ImportJob() + post_with_metadata.return_value = resources.ImportJob(), metadata - client.get_public_key( + client.get_import_job( request, metadata=[ ("key", "val"), @@ -20494,13 +23383,15 @@ def test_get_public_key_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_import_job_rest_bad_request(request_type=service.GetImportJobRequest): +def test_get_retired_resource_rest_bad_request( + request_type=service.GetRetiredResourceRequest, +): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy 
transcoding request_init = { - "name": "projects/sample1/locations/sample2/keyRings/sample3/importJobs/sample4" + "name": "projects/sample1/locations/sample2/retiredResources/sample3" } request = request_type(**request_init) @@ -20516,36 +23407,34 @@ def test_get_import_job_rest_bad_request(request_type=service.GetImportJobReques response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_import_job(request) + client.get_retired_resource(request) @pytest.mark.parametrize( "request_type", [ - service.GetImportJobRequest, + service.GetRetiredResourceRequest, dict, ], ) -def test_get_import_job_rest_call_success(request_type): +def test_get_retired_resource_rest_call_success(request_type): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/keyRings/sample3/importJobs/sample4" + "name": "projects/sample1/locations/sample2/retiredResources/sample3" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.ImportJob( + return_value = resources.RetiredResource( name="name_value", - import_method=resources.ImportJob.ImportMethod.RSA_OAEP_3072_SHA1_AES_256, - protection_level=resources.ProtectionLevel.SOFTWARE, - state=resources.ImportJob.ImportJobState.PENDING_GENERATION, - crypto_key_backend="crypto_key_backend_value", + original_resource="original_resource_value", + resource_type="resource_type_value", ) # Wrap the value into a proper Response obj @@ -20553,27 +23442,22 @@ def test_get_import_job_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.ImportJob.pb(return_value) + return_value = resources.RetiredResource.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_import_job(request) + response = client.get_retired_resource(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.ImportJob) + assert isinstance(response, resources.RetiredResource) assert response.name == "name_value" - assert ( - response.import_method - == resources.ImportJob.ImportMethod.RSA_OAEP_3072_SHA1_AES_256 - ) - assert response.protection_level == resources.ProtectionLevel.SOFTWARE - assert response.state == resources.ImportJob.ImportJobState.PENDING_GENERATION - assert response.crypto_key_backend == "crypto_key_backend_value" + assert response.original_resource == "original_resource_value" + assert response.resource_type == "resource_type_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_import_job_rest_interceptors(null_interceptor): +def test_get_retired_resource_rest_interceptors(null_interceptor): transport = transports.KeyManagementServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20587,17 +23471,19 @@ def test_get_import_job_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.KeyManagementServiceRestInterceptor, "post_get_import_job" + transports.KeyManagementServiceRestInterceptor, "post_get_retired_resource" ) as post, mock.patch.object( transports.KeyManagementServiceRestInterceptor, - "post_get_import_job_with_metadata", + "post_get_retired_resource_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.KeyManagementServiceRestInterceptor, "pre_get_import_job" + transports.KeyManagementServiceRestInterceptor, "pre_get_retired_resource" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = service.GetImportJobRequest.pb(service.GetImportJobRequest()) + pb_message = service.GetRetiredResourceRequest.pb( + service.GetRetiredResourceRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -20608,19 +23494,19 @@ def 
test_get_import_job_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = resources.ImportJob.to_json(resources.ImportJob()) + return_value = resources.RetiredResource.to_json(resources.RetiredResource()) req.return_value.content = return_value - request = service.GetImportJobRequest() + request = service.GetRetiredResourceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.ImportJob() - post_with_metadata.return_value = resources.ImportJob(), metadata + post.return_value = resources.RetiredResource() + post_with_metadata.return_value = resources.RetiredResource(), metadata - client.get_import_job( + client.get_retired_resource( request, metadata=[ ("key", "val"), @@ -20902,25 +23788,263 @@ def test_create_crypto_key_rest_call_success(request_type): "external_key_uri": "external_key_uri_value", "ekm_connection_key_path": "ekm_connection_key_path_value", }, - "reimport_eligible": True, + "reimport_eligible": True, + }, + "purpose": 1, + "create_time": {}, + "next_rotation_time": {}, + "rotation_period": {"seconds": 751, "nanos": 543}, + "version_template": {"protection_level": 1, "algorithm": 1}, + "labels": {}, + "import_only": True, + "destroy_scheduled_duration": {}, + "crypto_key_backend": "crypto_key_backend_value", + "key_access_justifications_policy": {"allowed_access_reasons": [1]}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.CreateCryptoKeyRequest.meta.fields["crypto_key"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["crypto_key"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # 
Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["crypto_key"][field])): + del request_init["crypto_key"][field][i][subfield] + else: + del request_init["crypto_key"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.CryptoKey( + name="name_value", + purpose=resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT, + import_only=True, + crypto_key_backend="crypto_key_backend_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.CryptoKey.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_crypto_key(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.CryptoKey) + assert response.name == "name_value" + assert response.purpose == resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT + assert response.import_only is True + assert response.crypto_key_backend == "crypto_key_backend_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_crypto_key_rest_interceptors(null_interceptor): + transport = transports.KeyManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.KeyManagementServiceRestInterceptor(), + ) + client = KeyManagementServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, "post_create_crypto_key" + ) as post, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, + "post_create_crypto_key_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, "pre_create_crypto_key" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = service.CreateCryptoKeyRequest.pb(service.CreateCryptoKeyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = resources.CryptoKey.to_json(resources.CryptoKey()) + req.return_value.content = return_value + + request = service.CreateCryptoKeyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.CryptoKey() + post_with_metadata.return_value = resources.CryptoKey(), 
metadata + + client.create_crypto_key( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_crypto_key_version_rest_bad_request( + request_type=service.CreateCryptoKeyVersionRequest, +): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/keyRings/sample3/cryptoKeys/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_crypto_key_version(request) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateCryptoKeyVersionRequest, + dict, + ], +) +def test_create_crypto_key_version_rest_call_success(request_type): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/keyRings/sample3/cryptoKeys/sample4" + } + request_init["crypto_key_version"] = { + "name": "name_value", + "state": 5, + "protection_level": 1, + "algorithm": 1, + "attestation": { + "format": 3, + "content": b"content_blob", + "cert_chains": { + "cavium_certs": ["cavium_certs_value1", "cavium_certs_value2"], + "google_card_certs": [ + "google_card_certs_value1", + 
"google_card_certs_value2", + ], + "google_partition_certs": [ + "google_partition_certs_value1", + "google_partition_certs_value2", + ], + }, }, - "purpose": 1, - "create_time": {}, - "next_rotation_time": {}, - "rotation_period": {"seconds": 751, "nanos": 543}, - "version_template": {"protection_level": 1, "algorithm": 1}, - "labels": {}, - "import_only": True, - "destroy_scheduled_duration": {}, - "crypto_key_backend": "crypto_key_backend_value", - "key_access_justifications_policy": {"allowed_access_reasons": [1]}, + "create_time": {"seconds": 751, "nanos": 543}, + "generate_time": {}, + "destroy_time": {}, + "destroy_event_time": {}, + "import_job": "import_job_value", + "import_time": {}, + "import_failure_reason": "import_failure_reason_value", + "generation_failure_reason": "generation_failure_reason_value", + "external_destruction_failure_reason": "external_destruction_failure_reason_value", + "external_protection_level_options": { + "external_key_uri": "external_key_uri_value", + "ekm_connection_key_path": "ekm_connection_key_path_value", + }, + "reimport_eligible": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = service.CreateCryptoKeyRequest.meta.fields["crypto_key"] + test_field = service.CreateCryptoKeyVersionRequest.meta.fields["crypto_key_version"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -20948,7 +24072,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["crypto_key"].items(): # pragma: NO COVER + for field, value in request_init["crypto_key_version"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -20978,20 +24102,25 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["crypto_key"][field])): - del request_init["crypto_key"][field][i][subfield] + for i in range(0, len(request_init["crypto_key_version"][field])): + del request_init["crypto_key_version"][field][i][subfield] else: - del request_init["crypto_key"][field][subfield] + del request_init["crypto_key_version"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.CryptoKey( + return_value = resources.CryptoKeyVersion( name="name_value", - purpose=resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT, - import_only=True, - crypto_key_backend="crypto_key_backend_value", + state=resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION, + protection_level=resources.ProtectionLevel.SOFTWARE, + algorithm=resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION, + import_job="import_job_value", + import_failure_reason="import_failure_reason_value", + generation_failure_reason="generation_failure_reason_value", + external_destruction_failure_reason="external_destruction_failure_reason_value", + reimport_eligible=True, ) # Wrap the value into a proper Response obj @@ -20999,23 +24128,37 @@ def get_message_fields(field): response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.CryptoKey.pb(return_value) + return_value = resources.CryptoKeyVersion.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_crypto_key(request) + response = client.create_crypto_key_version(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.CryptoKey) + assert isinstance(response, resources.CryptoKeyVersion) assert response.name == "name_value" - assert response.purpose == resources.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT - assert response.import_only is True - assert response.crypto_key_backend == "crypto_key_backend_value" + assert ( + response.state + == resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION + ) + assert response.protection_level == resources.ProtectionLevel.SOFTWARE + assert ( + response.algorithm + == resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION + ) + assert response.import_job == "import_job_value" + assert response.import_failure_reason == "import_failure_reason_value" + assert response.generation_failure_reason == "generation_failure_reason_value" + assert ( + response.external_destruction_failure_reason + == "external_destruction_failure_reason_value" + ) + assert response.reimport_eligible is True @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_crypto_key_rest_interceptors(null_interceptor): +def test_create_crypto_key_version_rest_interceptors(null_interceptor): transport = transports.KeyManagementServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21029,17 +24172,19 @@ def test_create_crypto_key_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.KeyManagementServiceRestInterceptor, "post_create_crypto_key" + transports.KeyManagementServiceRestInterceptor, "post_create_crypto_key_version" ) as post, mock.patch.object( transports.KeyManagementServiceRestInterceptor, - "post_create_crypto_key_with_metadata", + "post_create_crypto_key_version_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.KeyManagementServiceRestInterceptor, "pre_create_crypto_key" + transports.KeyManagementServiceRestInterceptor, 
"pre_create_crypto_key_version" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = service.CreateCryptoKeyRequest.pb(service.CreateCryptoKeyRequest()) + pb_message = service.CreateCryptoKeyVersionRequest.pb( + service.CreateCryptoKeyVersionRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -21050,19 +24195,19 @@ def test_create_crypto_key_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = resources.CryptoKey.to_json(resources.CryptoKey()) + return_value = resources.CryptoKeyVersion.to_json(resources.CryptoKeyVersion()) req.return_value.content = return_value - request = service.CreateCryptoKeyRequest() + request = service.CreateCryptoKeyVersionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.CryptoKey() - post_with_metadata.return_value = resources.CryptoKey(), metadata + post.return_value = resources.CryptoKeyVersion() + post_with_metadata.return_value = resources.CryptoKeyVersion(), metadata - client.create_crypto_key( + client.create_crypto_key_version( request, metadata=[ ("key", "val"), @@ -21075,15 +24220,15 @@ def test_create_crypto_key_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_create_crypto_key_version_rest_bad_request( - request_type=service.CreateCryptoKeyVersionRequest, +def test_delete_crypto_key_rest_bad_request( + request_type=service.DeleteCryptoKeyRequest, ): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/keyRings/sample3/cryptoKeys/sample4" + "name": "projects/sample1/locations/sample2/keyRings/sample3/cryptoKeys/sample4" 
} request = request_type(**request_init) @@ -21099,180 +24244,173 @@ def test_create_crypto_key_version_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_crypto_key_version(request) + client.delete_crypto_key(request) @pytest.mark.parametrize( "request_type", [ - service.CreateCryptoKeyVersionRequest, + service.DeleteCryptoKeyRequest, dict, ], ) -def test_create_crypto_key_version_rest_call_success(request_type): +def test_delete_crypto_key_rest_call_success(request_type): client = KeyManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/keyRings/sample3/cryptoKeys/sample4" - } - request_init["crypto_key_version"] = { - "name": "name_value", - "state": 5, - "protection_level": 1, - "algorithm": 1, - "attestation": { - "format": 3, - "content": b"content_blob", - "cert_chains": { - "cavium_certs": ["cavium_certs_value1", "cavium_certs_value2"], - "google_card_certs": [ - "google_card_certs_value1", - "google_card_certs_value2", - ], - "google_partition_certs": [ - "google_partition_certs_value1", - "google_partition_certs_value2", - ], - }, - }, - "create_time": {"seconds": 751, "nanos": 543}, - "generate_time": {}, - "destroy_time": {}, - "destroy_event_time": {}, - "import_job": "import_job_value", - "import_time": {}, - "import_failure_reason": "import_failure_reason_value", - "generation_failure_reason": "generation_failure_reason_value", - "external_destruction_failure_reason": "external_destruction_failure_reason_value", - "external_protection_level_options": { - "external_key_uri": "external_key_uri_value", - "ekm_connection_key_path": "ekm_connection_key_path_value", - }, - "reimport_eligible": True, + "name": "projects/sample1/locations/sample2/keyRings/sample3/cryptoKeys/sample4" 
} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + request = request_type(**request_init) - # Determine if the message type is proto-plus or protobuf - test_field = service.CreateCryptoKeyVersionRequest.meta.fields["crypto_key_version"] + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_crypto_key(request) - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_crypto_key_rest_interceptors(null_interceptor): + transport = transports.KeyManagementServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.KeyManagementServiceRestInterceptor(), + ) + client = KeyManagementServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.KeyManagementServiceRestInterceptor, "post_delete_crypto_key" + ) as post, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, + "post_delete_crypto_key_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, "pre_delete_crypto_key" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = service.DeleteCryptoKeyRequest.pb(service.DeleteCryptoKeyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + 
req.return_value.content = return_value + + request = service.DeleteCryptoKeyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_crypto_key( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + - subfields_not_in_runtime = [] +def test_delete_crypto_key_version_rest_bad_request( + request_type=service.DeleteCryptoKeyVersionRequest, +): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/keyRings/sample3/cryptoKeys/sample4/cryptoKeyVersions/sample5" + } + request = request_type(**request_init) - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["crypto_key_version"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_crypto_key_version(request) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["crypto_key_version"][field])): - del request_init["crypto_key_version"][field][i][subfield] - else: - del request_init["crypto_key_version"][field][subfield] +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteCryptoKeyVersionRequest, + dict, + ], +) +def test_delete_crypto_key_version_rest_call_success(request_type): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/keyRings/sample3/cryptoKeys/sample4/cryptoKeyVersions/sample5" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.CryptoKeyVersion( - name="name_value", - state=resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION, - protection_level=resources.ProtectionLevel.SOFTWARE, - algorithm=resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION, - import_job="import_job_value", - import_failure_reason="import_failure_reason_value", - generation_failure_reason="generation_failure_reason_value", - external_destruction_failure_reason="external_destruction_failure_reason_value", - reimport_eligible=True, - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.CryptoKeyVersion.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_crypto_key_version(request) + response = client.delete_crypto_key_version(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.CryptoKeyVersion) - assert response.name == "name_value" - assert ( - response.state - == resources.CryptoKeyVersion.CryptoKeyVersionState.PENDING_GENERATION - ) - assert response.protection_level == resources.ProtectionLevel.SOFTWARE - assert ( - response.algorithm - == resources.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION - ) - assert response.import_job == "import_job_value" - assert response.import_failure_reason == "import_failure_reason_value" - assert response.generation_failure_reason == "generation_failure_reason_value" - assert ( - response.external_destruction_failure_reason - == "external_destruction_failure_reason_value" - ) - assert response.reimport_eligible is True + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_crypto_key_version_rest_interceptors(null_interceptor): +def test_delete_crypto_key_version_rest_interceptors(null_interceptor): transport = transports.KeyManagementServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21286,18 +24424,20 @@ def test_create_crypto_key_version_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.KeyManagementServiceRestInterceptor, "post_create_crypto_key_version" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.KeyManagementServiceRestInterceptor, "post_delete_crypto_key_version" ) as post, mock.patch.object( transports.KeyManagementServiceRestInterceptor, - "post_create_crypto_key_version_with_metadata", + "post_delete_crypto_key_version_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.KeyManagementServiceRestInterceptor, "pre_create_crypto_key_version" + transports.KeyManagementServiceRestInterceptor, "pre_delete_crypto_key_version" ) as pre: 
pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = service.CreateCryptoKeyVersionRequest.pb( - service.CreateCryptoKeyVersionRequest() + pb_message = service.DeleteCryptoKeyVersionRequest.pb( + service.DeleteCryptoKeyVersionRequest() ) transcode.return_value = { "method": "post", @@ -21309,19 +24449,19 @@ def test_create_crypto_key_version_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = resources.CryptoKeyVersion.to_json(resources.CryptoKeyVersion()) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = service.CreateCryptoKeyVersionRequest() + request = service.DeleteCryptoKeyVersionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.CryptoKeyVersion() - post_with_metadata.return_value = resources.CryptoKeyVersion(), metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.create_crypto_key_version( + client.delete_crypto_key_version( request, metadata=[ ("key", "val"), @@ -24511,6 +27651,28 @@ def test_list_import_jobs_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_retired_resources_empty_call_rest(): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_retired_resources), "__call__" + ) as call: + client.list_retired_resources(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListRetiredResourcesRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_get_key_ring_empty_call_rest(): @@ -24613,6 +27775,28 @@ def test_get_import_job_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_retired_resource_empty_call_rest(): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_retired_resource), "__call__" + ) as call: + client.get_retired_resource(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetRetiredResourceRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_create_key_ring_empty_call_rest(): @@ -24677,6 +27861,50 @@ def test_create_crypto_key_version_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_crypto_key_empty_call_rest(): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_crypto_key), "__call__" + ) as call: + client.delete_crypto_key(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.DeleteCryptoKeyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_crypto_key_version_empty_call_rest(): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_crypto_key_version), "__call__" + ) as call: + client.delete_crypto_key_version(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.DeleteCryptoKeyVersionRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_import_crypto_key_version_empty_call_rest(): @@ -25035,6 +28263,23 @@ def test_generate_random_bytes_empty_call_rest(): assert args[0] == request_msg +def test_key_management_service_rest_lro_client(): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = KeyManagementServiceClient( @@ -25072,14 +28317,18 @@ def test_key_management_service_base_transport(): "list_crypto_keys", "list_crypto_key_versions", "list_import_jobs", + "list_retired_resources", "get_key_ring", "get_crypto_key", "get_crypto_key_version", "get_public_key", "get_import_job", + "get_retired_resource", "create_key_ring", "create_crypto_key", "create_crypto_key_version", + "delete_crypto_key", + "delete_crypto_key_version", "import_crypto_key_version", "create_import_job", "update_crypto_key", @@ -25111,6 +28360,11 @@ def test_key_management_service_base_transport(): with pytest.raises(NotImplementedError): transport.close() + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + # Catch all for all remaining methods and properties remainder = [ "kind", @@ -25387,6 +28641,9 @@ def test_key_management_service_client_transport_session_collision(transport_nam session1 = client1.transport.list_import_jobs._session session2 = client2.transport.list_import_jobs._session assert session1 != session2 + session1 = client1.transport.list_retired_resources._session + session2 = client2.transport.list_retired_resources._session + assert session1 != session2 session1 = client1.transport.get_key_ring._session session2 = client2.transport.get_key_ring._session assert session1 != session2 @@ -25402,6 +28659,9 @@ def test_key_management_service_client_transport_session_collision(transport_nam session1 = client1.transport.get_import_job._session session2 = client2.transport.get_import_job._session assert session1 != session2 + session1 = client1.transport.get_retired_resource._session + session2 = client2.transport.get_retired_resource._session + assert session1 != session2 session1 = client1.transport.create_key_ring._session session2 = client2.transport.create_key_ring._session assert session1 != session2 @@ -25411,6 +28671,12 @@ def 
test_key_management_service_client_transport_session_collision(transport_nam session1 = client1.transport.create_crypto_key_version._session session2 = client2.transport.create_crypto_key_version._session assert session1 != session2 + session1 = client1.transport.delete_crypto_key._session + session2 = client2.transport.delete_crypto_key._session + assert session1 != session2 + session1 = client1.transport.delete_crypto_key_version._session + session2 = client2.transport.delete_crypto_key_version._session + assert session1 != session2 session1 = client1.transport.import_crypto_key_version._session session2 = client2.transport.import_crypto_key_version._session assert session1 != session2 @@ -25591,6 +28857,40 @@ def test_key_management_service_transport_channel_mtls_with_adc(transport_class) assert transport.grpc_channel == mock_grpc_channel +def test_key_management_service_grpc_lro_client(): + client = KeyManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_key_management_service_grpc_lro_async_client(): + client = KeyManagementServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + def test_crypto_key_path(): project = "squid" location = "clam" @@ -25747,8 +29047,36 @@ def test_parse_public_key_path(): assert expected == actual +def test_retired_resource_path(): + project = "cuttlefish" + location = "mussel" + retired_resource = "winkle" + expected = "projects/{project}/locations/{location}/retiredResources/{retired_resource}".format( + project=project, + location=location, + retired_resource=retired_resource, + ) + actual = KeyManagementServiceClient.retired_resource_path( + project, location, retired_resource + ) + assert expected == actual + + +def test_parse_retired_resource_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "retired_resource": "abalone", + } + path = KeyManagementServiceClient.retired_resource_path(**expected) + + # Check that the path construction is reversible. + actual = KeyManagementServiceClient.parse_retired_resource_path(path) + assert expected == actual + + def test_common_billing_account_path(): - billing_account = "cuttlefish" + billing_account = "squid" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -25758,7 +29086,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "mussel", + "billing_account": "clam", } path = KeyManagementServiceClient.common_billing_account_path(**expected) @@ -25768,7 +29096,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "winkle" + folder = "whelk" expected = "folders/{folder}".format( folder=folder, ) @@ -25778,7 +29106,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nautilus", + "folder": "octopus", } path = KeyManagementServiceClient.common_folder_path(**expected) @@ -25788,7 +29116,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = 
"scallop" + organization = "oyster" expected = "organizations/{organization}".format( organization=organization, ) @@ -25798,7 +29126,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "abalone", + "organization": "nudibranch", } path = KeyManagementServiceClient.common_organization_path(**expected) @@ -25808,7 +29136,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "squid" + project = "cuttlefish" expected = "projects/{project}".format( project=project, ) @@ -25818,7 +29146,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "clam", + "project": "mussel", } path = KeyManagementServiceClient.common_project_path(**expected) @@ -25828,8 +29156,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "whelk" - location = "octopus" + project = "winkle" + location = "nautilus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -25840,8 +29168,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "oyster", - "location": "nudibranch", + "project": "scallop", + "location": "abalone", } path = KeyManagementServiceClient.common_location_path(**expected) diff --git a/packages/google-cloud-saasplatform-saasservicemgmt/google/cloud/saasplatform_saasservicemgmt_v1beta1/types/common.py b/packages/google-cloud-saasplatform-saasservicemgmt/google/cloud/saasplatform_saasservicemgmt_v1beta1/types/common.py index 32502ef539cc..fc6ba24e8358 100644 --- a/packages/google-cloud-saasplatform-saasservicemgmt/google/cloud/saasplatform_saasservicemgmt_v1beta1/types/common.py +++ b/packages/google-cloud-saasplatform-saasservicemgmt/google/cloud/saasplatform_saasservicemgmt_v1beta1/types/common.py @@ -284,12 +284,19 @@ class Type(proto.Enum): Condition type is succeeded. 
TYPE_CANCELLED (5): Condition type is cancelled. + TYPE_APP_CREATED (6): + Indicates if AppHub app has been created. + TYPE_APP_COMPONENTS_REGISTERED (7): + Indicates if services and workloads have been + registered with AppHub. """ TYPE_UNSPECIFIED = 0 TYPE_SCHEDULED = 2 TYPE_RUNNING = 3 TYPE_SUCCEEDED = 4 TYPE_CANCELLED = 5 + TYPE_APP_CREATED = 6 + TYPE_APP_COMPONENTS_REGISTERED = 7 status: Status = proto.Field( proto.ENUM, diff --git a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations/__init__.py b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations/__init__.py index 590e32bc7a11..a6df9b7819e7 100644 --- a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations/__init__.py +++ b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations/__init__.py @@ -29,13 +29,17 @@ CancelJobResponse, CreateJobRequest, DeleteJobRequest, + GetBucketOperationRequest, GetJobRequest, + ListBucketOperationsRequest, + ListBucketOperationsResponse, ListJobsRequest, ListJobsResponse, OperationMetadata, ) from google.cloud.storagebatchoperations_v1.types.storage_batch_operations_types import ( BucketList, + BucketOperation, Counters, DeleteObject, ErrorLogEntry, @@ -57,11 +61,15 @@ "CancelJobResponse", "CreateJobRequest", "DeleteJobRequest", + "GetBucketOperationRequest", "GetJobRequest", + "ListBucketOperationsRequest", + "ListBucketOperationsResponse", "ListJobsRequest", "ListJobsResponse", "OperationMetadata", "BucketList", + "BucketOperation", "Counters", "DeleteObject", "ErrorLogEntry", diff --git a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/__init__.py b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/__init__.py index 8ea439fb21fc..339da526341d 100644 --- a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/__init__.py +++ 
b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/__init__.py @@ -37,13 +37,17 @@ CancelJobResponse, CreateJobRequest, DeleteJobRequest, + GetBucketOperationRequest, GetJobRequest, + ListBucketOperationsRequest, + ListBucketOperationsResponse, ListJobsRequest, ListJobsResponse, OperationMetadata, ) from .types.storage_batch_operations_types import ( BucketList, + BucketOperation, Counters, DeleteObject, ErrorLogEntry, @@ -155,6 +159,7 @@ def _get_version(dependency_name): __all__ = ( "StorageBatchOperationsAsyncClient", "BucketList", + "BucketOperation", "CancelJobRequest", "CancelJobResponse", "Counters", @@ -163,8 +168,11 @@ def _get_version(dependency_name): "DeleteObject", "ErrorLogEntry", "ErrorSummary", + "GetBucketOperationRequest", "GetJobRequest", "Job", + "ListBucketOperationsRequest", + "ListBucketOperationsResponse", "ListJobsRequest", "ListJobsResponse", "LoggingConfig", diff --git a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/gapic_metadata.json b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/gapic_metadata.json index c5bc99c44670..349b9f6e04f7 100644 --- a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/gapic_metadata.json +++ b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/gapic_metadata.json @@ -25,11 +25,21 @@ "delete_job" ] }, + "GetBucketOperation": { + "methods": [ + "get_bucket_operation" + ] + }, "GetJob": { "methods": [ "get_job" ] }, + "ListBucketOperations": { + "methods": [ + "list_bucket_operations" + ] + }, "ListJobs": { "methods": [ "list_jobs" @@ -55,11 +65,21 @@ "delete_job" ] }, + "GetBucketOperation": { + "methods": [ + "get_bucket_operation" + ] + }, "GetJob": { "methods": [ "get_job" ] }, + "ListBucketOperations": { + "methods": [ + "list_bucket_operations" + ] + }, "ListJobs": { "methods": [ "list_jobs" @@ -85,11 +105,21 @@ "delete_job" ] }, + 
"GetBucketOperation": { + "methods": [ + "get_bucket_operation" + ] + }, "GetJob": { "methods": [ "get_job" ] }, + "ListBucketOperations": { + "methods": [ + "list_bucket_operations" + ] + }, "ListJobs": { "methods": [ "list_jobs" diff --git a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/async_client.py b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/async_client.py index 8e2096e2fe18..3939170187d7 100644 --- a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/async_client.py +++ b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/async_client.py @@ -89,6 +89,12 @@ class StorageBatchOperationsAsyncClient: _DEFAULT_ENDPOINT_TEMPLATE = StorageBatchOperationsClient._DEFAULT_ENDPOINT_TEMPLATE _DEFAULT_UNIVERSE = StorageBatchOperationsClient._DEFAULT_UNIVERSE + bucket_operation_path = staticmethod( + StorageBatchOperationsClient.bucket_operation_path + ) + parse_bucket_operation_path = staticmethod( + StorageBatchOperationsClient.parse_bucket_operation_path + ) crypto_key_path = staticmethod(StorageBatchOperationsClient.crypto_key_path) parse_crypto_key_path = staticmethod( StorageBatchOperationsClient.parse_crypto_key_path @@ -909,6 +915,250 @@ async def sample_cancel_job(): # Done; return the response. return response + async def list_bucket_operations( + self, + request: Optional[ + Union[storage_batch_operations.ListBucketOperationsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListBucketOperationsAsyncPager: + r"""Lists BucketOperations in a given project and job. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import storagebatchoperations_v1 + + async def sample_list_bucket_operations(): + # Create a client + client = storagebatchoperations_v1.StorageBatchOperationsAsyncClient() + + # Initialize request argument(s) + request = storagebatchoperations_v1.ListBucketOperationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_bucket_operations(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.storagebatchoperations_v1.types.ListBucketOperationsRequest, dict]]): + The request object. Message for request to list + BucketOperations + parent (:class:`str`): + Required. Format: + projects/{project_id}/locations/global/jobs/{job_id}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.cloud.storagebatchoperations_v1.services.storage_batch_operations.pagers.ListBucketOperationsAsyncPager: + Message for response to listing + BucketOperations + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, storage_batch_operations.ListBucketOperationsRequest + ): + request = storage_batch_operations.ListBucketOperationsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_bucket_operations + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListBucketOperationsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_bucket_operation( + self, + request: Optional[ + Union[storage_batch_operations.GetBucketOperationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> storage_batch_operations_types.BucketOperation: + r"""Gets a BucketOperation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import storagebatchoperations_v1 + + async def sample_get_bucket_operation(): + # Create a client + client = storagebatchoperations_v1.StorageBatchOperationsAsyncClient() + + # Initialize request argument(s) + request = storagebatchoperations_v1.GetBucketOperationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_bucket_operation(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.storagebatchoperations_v1.types.GetBucketOperationRequest, dict]]): + The request object. Message for getting a + BucketOperation. + name (:class:`str`): + Required. ``name`` of the bucket operation to retrieve. + Format: + projects/{project_id}/locations/global/jobs/{job_id}/bucketOperations/{bucket_operation_id}. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.storagebatchoperations_v1.types.BucketOperation: + BucketOperation represents a + bucket-level breakdown of a Job. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, storage_batch_operations.GetBucketOperationRequest): + request = storage_batch_operations.GetBucketOperationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_bucket_operation + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/client.py b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/client.py index 7bb5ac314b8a..98d1763d8abf 100644 --- a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/client.py +++ b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/client.py @@ -240,6 +240,30 @@ def transport(self) -> StorageBatchOperationsTransport: """ return self._transport + @staticmethod + def bucket_operation_path( + project: str, + location: str, + job: str, + bucket_operation: str, + ) -> str: + """Returns a fully-qualified bucket_operation string.""" + return "projects/{project}/locations/{location}/jobs/{job}/bucketOperations/{bucket_operation}".format( + project=project, + location=location, + job=job, + bucket_operation=bucket_operation, + ) + + @staticmethod + def parse_bucket_operation_path(path: str) -> Dict[str, str]: + """Parses a bucket_operation path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/jobs/(?P.+?)/bucketOperations/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def crypto_key_path( project: str, @@ -1358,6 +1382,244 @@ def sample_cancel_job(): # Done; return the response. 
return response + def list_bucket_operations( + self, + request: Optional[ + Union[storage_batch_operations.ListBucketOperationsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListBucketOperationsPager: + r"""Lists BucketOperations in a given project and job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import storagebatchoperations_v1 + + def sample_list_bucket_operations(): + # Create a client + client = storagebatchoperations_v1.StorageBatchOperationsClient() + + # Initialize request argument(s) + request = storagebatchoperations_v1.ListBucketOperationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_bucket_operations(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.storagebatchoperations_v1.types.ListBucketOperationsRequest, dict]): + The request object. Message for request to list + BucketOperations + parent (str): + Required. Format: + projects/{project_id}/locations/global/jobs/{job_id}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.storagebatchoperations_v1.services.storage_batch_operations.pagers.ListBucketOperationsPager: + Message for response to listing + BucketOperations + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, storage_batch_operations.ListBucketOperationsRequest + ): + request = storage_batch_operations.ListBucketOperationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_bucket_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListBucketOperationsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_bucket_operation( + self, + request: Optional[ + Union[storage_batch_operations.GetBucketOperationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> storage_batch_operations_types.BucketOperation: + r"""Gets a BucketOperation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import storagebatchoperations_v1 + + def sample_get_bucket_operation(): + # Create a client + client = storagebatchoperations_v1.StorageBatchOperationsClient() + + # Initialize request argument(s) + request = storagebatchoperations_v1.GetBucketOperationRequest( + name="name_value", + ) + + # Make the request + response = client.get_bucket_operation(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.storagebatchoperations_v1.types.GetBucketOperationRequest, dict]): + The request object. Message for getting a + BucketOperation. + name (str): + Required. ``name`` of the bucket operation to retrieve. 
+ Format: + projects/{project_id}/locations/global/jobs/{job_id}/bucketOperations/{bucket_operation_id}. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.storagebatchoperations_v1.types.BucketOperation: + BucketOperation represents a + bucket-level breakdown of a Job. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, storage_batch_operations.GetBucketOperationRequest): + request = storage_batch_operations.GetBucketOperationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_bucket_operation] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def __enter__(self) -> "StorageBatchOperationsClient": return self diff --git a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/pagers.py b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/pagers.py index 9ba9cb6a7cdc..c9c4e0efaace 100644 --- a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/pagers.py +++ b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/pagers.py @@ -198,3 +198,165 @@ async def async_generator(): def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBucketOperationsPager: + """A pager for iterating through ``list_bucket_operations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.storagebatchoperations_v1.types.ListBucketOperationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``bucket_operations`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBucketOperations`` requests and continue to iterate + through the ``bucket_operations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.storagebatchoperations_v1.types.ListBucketOperationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., storage_batch_operations.ListBucketOperationsResponse], + request: storage_batch_operations.ListBucketOperationsRequest, + response: storage_batch_operations.ListBucketOperationsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.storagebatchoperations_v1.types.ListBucketOperationsRequest): + The initial request object. + response (google.cloud.storagebatchoperations_v1.types.ListBucketOperationsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = storage_batch_operations.ListBucketOperationsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[storage_batch_operations.ListBucketOperationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[storage_batch_operations_types.BucketOperation]: + for page in self.pages: + yield from page.bucket_operations + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBucketOperationsAsyncPager: + """A pager for iterating through ``list_bucket_operations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.storagebatchoperations_v1.types.ListBucketOperationsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``bucket_operations`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBucketOperations`` requests and continue to iterate + through the ``bucket_operations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.storagebatchoperations_v1.types.ListBucketOperationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[storage_batch_operations.ListBucketOperationsResponse] + ], + request: storage_batch_operations.ListBucketOperationsRequest, + response: storage_batch_operations.ListBucketOperationsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.storagebatchoperations_v1.types.ListBucketOperationsRequest): + The initial request object. + response (google.cloud.storagebatchoperations_v1.types.ListBucketOperationsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = storage_batch_operations.ListBucketOperationsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[storage_batch_operations.ListBucketOperationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__( + self, + ) -> AsyncIterator[storage_batch_operations_types.BucketOperation]: + async def async_generator(): + async for page in self.pages: + for response in page.bucket_operations: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/base.py b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/base.py index f69fd97e86a5..68ba71ccde94 100644 --- a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/base.py +++ b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/base.py @@ -194,6 +194,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.list_bucket_operations: gapic_v1.method.wrap_method( + self.list_bucket_operations, + default_timeout=None, + client_info=client_info, + ), + self.get_bucket_operation: gapic_v1.method.wrap_method( + 
self.get_bucket_operation, + default_timeout=None, + client_info=client_info, + ), self.get_location: gapic_v1.method.wrap_method( self.get_location, default_timeout=None, @@ -294,6 +304,30 @@ def cancel_job( ]: raise NotImplementedError() + @property + def list_bucket_operations( + self, + ) -> Callable[ + [storage_batch_operations.ListBucketOperationsRequest], + Union[ + storage_batch_operations.ListBucketOperationsResponse, + Awaitable[storage_batch_operations.ListBucketOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_bucket_operation( + self, + ) -> Callable[ + [storage_batch_operations.GetBucketOperationRequest], + Union[ + storage_batch_operations_types.BucketOperation, + Awaitable[storage_batch_operations_types.BucketOperation], + ], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/grpc.py b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/grpc.py index d12ea333d439..59178f9bdc66 100644 --- a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/grpc.py +++ b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/grpc.py @@ -488,6 +488,64 @@ def cancel_job( ) return self._stubs["cancel_job"] + @property + def list_bucket_operations( + self, + ) -> Callable[ + [storage_batch_operations.ListBucketOperationsRequest], + storage_batch_operations.ListBucketOperationsResponse, + ]: + r"""Return a callable for the list bucket operations method over gRPC. + + Lists BucketOperations in a given project and job. 
+ + Returns: + Callable[[~.ListBucketOperationsRequest], + ~.ListBucketOperationsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_bucket_operations" not in self._stubs: + self._stubs["list_bucket_operations"] = self._logged_channel.unary_unary( + "/google.cloud.storagebatchoperations.v1.StorageBatchOperations/ListBucketOperations", + request_serializer=storage_batch_operations.ListBucketOperationsRequest.serialize, + response_deserializer=storage_batch_operations.ListBucketOperationsResponse.deserialize, + ) + return self._stubs["list_bucket_operations"] + + @property + def get_bucket_operation( + self, + ) -> Callable[ + [storage_batch_operations.GetBucketOperationRequest], + storage_batch_operations_types.BucketOperation, + ]: + r"""Return a callable for the get bucket operation method over gRPC. + + Gets a BucketOperation. + + Returns: + Callable[[~.GetBucketOperationRequest], + ~.BucketOperation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_bucket_operation" not in self._stubs: + self._stubs["get_bucket_operation"] = self._logged_channel.unary_unary( + "/google.cloud.storagebatchoperations.v1.StorageBatchOperations/GetBucketOperation", + request_serializer=storage_batch_operations.GetBucketOperationRequest.serialize, + response_deserializer=storage_batch_operations_types.BucketOperation.deserialize, + ) + return self._stubs["get_bucket_operation"] + def close(self): self._logged_channel.close() diff --git a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/grpc_asyncio.py b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/grpc_asyncio.py index e0354b427fdf..e9284b55a050 100644 --- a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/grpc_asyncio.py +++ b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/grpc_asyncio.py @@ -499,6 +499,64 @@ def cancel_job( ) return self._stubs["cancel_job"] + @property + def list_bucket_operations( + self, + ) -> Callable[ + [storage_batch_operations.ListBucketOperationsRequest], + Awaitable[storage_batch_operations.ListBucketOperationsResponse], + ]: + r"""Return a callable for the list bucket operations method over gRPC. + + Lists BucketOperations in a given project and job. + + Returns: + Callable[[~.ListBucketOperationsRequest], + Awaitable[~.ListBucketOperationsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_bucket_operations" not in self._stubs: + self._stubs["list_bucket_operations"] = self._logged_channel.unary_unary( + "/google.cloud.storagebatchoperations.v1.StorageBatchOperations/ListBucketOperations", + request_serializer=storage_batch_operations.ListBucketOperationsRequest.serialize, + response_deserializer=storage_batch_operations.ListBucketOperationsResponse.deserialize, + ) + return self._stubs["list_bucket_operations"] + + @property + def get_bucket_operation( + self, + ) -> Callable[ + [storage_batch_operations.GetBucketOperationRequest], + Awaitable[storage_batch_operations_types.BucketOperation], + ]: + r"""Return a callable for the get bucket operation method over gRPC. + + Gets a BucketOperation. + + Returns: + Callable[[~.GetBucketOperationRequest], + Awaitable[~.BucketOperation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_bucket_operation" not in self._stubs: + self._stubs["get_bucket_operation"] = self._logged_channel.unary_unary( + "/google.cloud.storagebatchoperations.v1.StorageBatchOperations/GetBucketOperation", + request_serializer=storage_batch_operations.GetBucketOperationRequest.serialize, + response_deserializer=storage_batch_operations_types.BucketOperation.deserialize, + ) + return self._stubs["get_bucket_operation"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -554,6 +612,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.list_bucket_operations: self._wrap_method( + self.list_bucket_operations, + default_timeout=None, + client_info=client_info, + ), + self.get_bucket_operation: self._wrap_method( + self.get_bucket_operation, + default_timeout=None, + client_info=client_info, + ), self.get_location: self._wrap_method( self.get_location, default_timeout=None, diff --git a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/rest.py b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/rest.py index 8e7486dc28fc..5ec85e73412b 100644 --- a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/rest.py +++ b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/rest.py @@ -98,6 +98,14 @@ def pre_delete_job(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata + def pre_get_bucket_operation(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_bucket_operation(self, response): + 
logging.log(f"Received response: {response}") + return response + def pre_get_job(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -106,6 +114,14 @@ def post_get_job(self, response): logging.log(f"Received response: {response}") return response + def pre_list_bucket_operations(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_bucket_operations(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_jobs(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -236,6 +252,58 @@ def pre_delete_job( """ return request, metadata + def pre_get_bucket_operation( + self, + request: storage_batch_operations.GetBucketOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + storage_batch_operations.GetBucketOperationRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_bucket_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the StorageBatchOperations server. + """ + return request, metadata + + def post_get_bucket_operation( + self, response: storage_batch_operations_types.BucketOperation + ) -> storage_batch_operations_types.BucketOperation: + """Post-rpc interceptor for get_bucket_operation + + DEPRECATED. Please use the `post_get_bucket_operation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the StorageBatchOperations server but before + it is returned to user code. This `post_get_bucket_operation` interceptor runs + before the `post_get_bucket_operation_with_metadata` interceptor. 
+ """ + return response + + def post_get_bucket_operation_with_metadata( + self, + response: storage_batch_operations_types.BucketOperation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + storage_batch_operations_types.BucketOperation, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_bucket_operation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the StorageBatchOperations server but before it is returned to user code. + + We recommend only using this `post_get_bucket_operation_with_metadata` + interceptor in new development instead of the `post_get_bucket_operation` interceptor. + When both interceptors are used, this `post_get_bucket_operation_with_metadata` interceptor runs after the + `post_get_bucket_operation` interceptor. The (possibly modified) response returned by + `post_get_bucket_operation` will be passed to + `post_get_bucket_operation_with_metadata`. + """ + return response, metadata + def pre_get_job( self, request: storage_batch_operations.GetJobRequest, @@ -286,6 +354,58 @@ def post_get_job_with_metadata( """ return response, metadata + def pre_list_bucket_operations( + self, + request: storage_batch_operations.ListBucketOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + storage_batch_operations.ListBucketOperationsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_bucket_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the StorageBatchOperations server. + """ + return request, metadata + + def post_list_bucket_operations( + self, response: storage_batch_operations.ListBucketOperationsResponse + ) -> storage_batch_operations.ListBucketOperationsResponse: + """Post-rpc interceptor for list_bucket_operations + + DEPRECATED. Please use the `post_list_bucket_operations_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response + after it is returned by the StorageBatchOperations server but before + it is returned to user code. This `post_list_bucket_operations` interceptor runs + before the `post_list_bucket_operations_with_metadata` interceptor. + """ + return response + + def post_list_bucket_operations_with_metadata( + self, + response: storage_batch_operations.ListBucketOperationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + storage_batch_operations.ListBucketOperationsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_bucket_operations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the StorageBatchOperations server but before it is returned to user code. + + We recommend only using this `post_list_bucket_operations_with_metadata` + interceptor in new development instead of the `post_list_bucket_operations` interceptor. + When both interceptors are used, this `post_list_bucket_operations_with_metadata` interceptor runs after the + `post_list_bucket_operations` interceptor. The (possibly modified) response returned by + `post_list_bucket_operations` will be passed to + `post_list_bucket_operations_with_metadata`. 
+ """ + return response, metadata + def pre_list_jobs( self, request: storage_batch_operations.ListJobsRequest, @@ -1042,6 +1162,160 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) + class _GetBucketOperation( + _BaseStorageBatchOperationsRestTransport._BaseGetBucketOperation, + StorageBatchOperationsRestStub, + ): + def __hash__(self): + return hash("StorageBatchOperationsRestTransport.GetBucketOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: storage_batch_operations.GetBucketOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> storage_batch_operations_types.BucketOperation: + r"""Call the get bucket operation method over HTTP. + + Args: + request (~.storage_batch_operations.GetBucketOperationRequest): + The request object. Message for getting a + BucketOperation. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + ~.storage_batch_operations_types.BucketOperation: + BucketOperation represents a + bucket-level breakdown of a Job. + + """ + + http_options = ( + _BaseStorageBatchOperationsRestTransport._BaseGetBucketOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_bucket_operation( + request, metadata + ) + transcoded_request = _BaseStorageBatchOperationsRestTransport._BaseGetBucketOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseStorageBatchOperationsRestTransport._BaseGetBucketOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.GetBucketOperation", + extra={ + "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", + "rpcName": "GetBucketOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + StorageBatchOperationsRestTransport._GetBucketOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = storage_batch_operations_types.BucketOperation() + pb_resp = storage_batch_operations_types.BucketOperation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_bucket_operation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_bucket_operation_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + storage_batch_operations_types.BucketOperation.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.get_bucket_operation", + extra={ + "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", + "rpcName": "GetBucketOperation", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _GetJob( _BaseStorageBatchOperationsRestTransport._BaseGetJob, StorageBatchOperationsRestStub, @@ -1191,6 +1465,162 @@ def __call__( ) return resp + class _ListBucketOperations( + _BaseStorageBatchOperationsRestTransport._BaseListBucketOperations, + StorageBatchOperationsRestStub, + ): + def __hash__(self): + return hash("StorageBatchOperationsRestTransport.ListBucketOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + 
"{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: storage_batch_operations.ListBucketOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> storage_batch_operations.ListBucketOperationsResponse: + r"""Call the list bucket operations method over HTTP. + + Args: + request (~.storage_batch_operations.ListBucketOperationsRequest): + The request object. Message for request to list + BucketOperations + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + ~.storage_batch_operations.ListBucketOperationsResponse: + Message for response to listing + BucketOperations + + """ + + http_options = ( + _BaseStorageBatchOperationsRestTransport._BaseListBucketOperations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_bucket_operations( + request, metadata + ) + transcoded_request = _BaseStorageBatchOperationsRestTransport._BaseListBucketOperations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseStorageBatchOperationsRestTransport._BaseListBucketOperations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.ListBucketOperations", + extra={ + "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", + "rpcName": "ListBucketOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + StorageBatchOperationsRestTransport._ListBucketOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = storage_batch_operations.ListBucketOperationsResponse() + pb_resp = storage_batch_operations.ListBucketOperationsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_bucket_operations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_bucket_operations_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + storage_batch_operations.ListBucketOperationsResponse.to_json( + response + ) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.list_bucket_operations", + extra={ + "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", + "rpcName": "ListBucketOperations", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _ListJobs( _BaseStorageBatchOperationsRestTransport._BaseListJobs, StorageBatchOperationsRestStub, @@ -1367,6 +1797,17 @@ def delete_job( # In C++ this would require a dynamic_cast return self._DeleteJob(self._session, self._host, self._interceptor) # type: ignore + @property + def get_bucket_operation( + self, + ) -> Callable[ + [storage_batch_operations.GetBucketOperationRequest], + storage_batch_operations_types.BucketOperation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetBucketOperation(self._session, self._host, self._interceptor) # type: ignore + @property def get_job( self, @@ -1377,6 +1818,17 @@ def get_job( # In C++ this would require a dynamic_cast return self._GetJob(self._session, self._host, self._interceptor) # type: ignore + @property + def list_bucket_operations( + self, + ) -> Callable[ + [storage_batch_operations.ListBucketOperationsRequest], + storage_batch_operations.ListBucketOperationsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBucketOperations(self._session, self._host, self._interceptor) # type: ignore + @property def list_jobs( self, diff --git a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/rest_base.py b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/rest_base.py index d59715197d97..06646e217067 100644 --- a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/rest_base.py +++ b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/rest_base.py @@ -256,6 +256,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseGetBucketOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: 
List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/jobs/*/bucketOperations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = storage_batch_operations.GetBucketOperationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseStorageBatchOperationsRestTransport._BaseGetBucketOperation._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseGetJob: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -303,6 +350,55 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseListBucketOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/jobs/*}/bucketOperations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = storage_batch_operations.ListBucketOperationsRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + 
query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseStorageBatchOperationsRestTransport._BaseListBucketOperations._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseListJobs: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/types/__init__.py b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/types/__init__.py index d5caac572f53..b67553f211e0 100644 --- a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/types/__init__.py +++ b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/types/__init__.py @@ -18,13 +18,17 @@ CancelJobResponse, CreateJobRequest, DeleteJobRequest, + GetBucketOperationRequest, GetJobRequest, + ListBucketOperationsRequest, + ListBucketOperationsResponse, ListJobsRequest, ListJobsResponse, OperationMetadata, ) from .storage_batch_operations_types import ( BucketList, + BucketOperation, Counters, DeleteObject, ErrorLogEntry, @@ -44,11 +48,15 @@ "CancelJobResponse", "CreateJobRequest", "DeleteJobRequest", + "GetBucketOperationRequest", "GetJobRequest", + "ListBucketOperationsRequest", + "ListBucketOperationsResponse", "ListJobsRequest", "ListJobsResponse", "OperationMetadata", "BucketList", + "BucketOperation", "Counters", "DeleteObject", "ErrorLogEntry", diff --git a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/types/storage_batch_operations.py b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/types/storage_batch_operations.py index c735c2f0fa71..fc53dd380a8f 100644 --- 
a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/types/storage_batch_operations.py +++ b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/types/storage_batch_operations.py @@ -32,6 +32,9 @@ "CancelJobRequest", "DeleteJobRequest", "CancelJobResponse", + "ListBucketOperationsRequest", + "ListBucketOperationsResponse", + "GetBucketOperationRequest", "OperationMetadata", }, ) @@ -205,6 +208,14 @@ class DeleteJobRequest(proto.Message): for at least 60 minutes since the first request. The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). + force (bool): + Optional. If set to true, any child bucket + operations of the job will also be deleted. + Highly recommended to be set to true by all + clients. Users cannot mutate bucket operations + directly, so only the jobs.delete permission is + required to delete a job (and its child bucket + operations). """ name: str = proto.Field( @@ -215,12 +226,107 @@ class DeleteJobRequest(proto.Message): proto.STRING, number=2, ) + force: bool = proto.Field( + proto.BOOL, + number=3, + ) class CancelJobResponse(proto.Message): r"""Message for response to cancel Job.""" +class ListBucketOperationsRequest(proto.Message): + r"""Message for request to list BucketOperations + + Attributes: + parent (str): + Required. Format: + projects/{project_id}/locations/global/jobs/{job_id}. + filter (str): + Optional. Filters results as defined by + https://google.aip.dev/160. + page_size (int): + Optional. The list page size. Default page + size is 100. + page_token (str): + Optional. The list page token. + order_by (str): + Optional. Field to sort by. Supported fields are name, + create_time. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListBucketOperationsResponse(proto.Message): + r"""Message for response to listing BucketOperations + + Attributes: + bucket_operations (MutableSequence[google.cloud.storagebatchoperations_v1.types.BucketOperation]): + A list of storage batch bucket operations. + next_page_token (str): + A token identifying a page of results. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + bucket_operations: MutableSequence[ + storage_batch_operations_types.BucketOperation + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage_batch_operations_types.BucketOperation, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetBucketOperationRequest(proto.Message): + r"""Message for getting a BucketOperation. + + Attributes: + name (str): + Required. ``name`` of the bucket operation to retrieve. + Format: + projects/{project_id}/locations/global/jobs/{job_id}/bucketOperations/{bucket_operation_id}. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + class OperationMetadata(proto.Message): r"""Represents the metadata of the long-running operation. 
diff --git a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/types/storage_batch_operations_types.py b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/types/storage_batch_operations_types.py index 3658399470e0..883c1f258779 100644 --- a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/types/storage_batch_operations_types.py +++ b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/types/storage_batch_operations_types.py @@ -25,6 +25,7 @@ package="google.cloud.storagebatchoperations.v1", manifest={ "Job", + "BucketOperation", "BucketList", "Manifest", "PrefixList", @@ -113,6 +114,11 @@ class Job(proto.Message): if the object configuration is a prefix list, the bytes found from source. No transformations will be performed. + is_multi_bucket_job (bool): + Output only. If true, this Job operates on + multiple buckets. Multibucket jobs are subject + to different quota limits than single-bucket + jobs. """ class State(proto.Enum): @@ -129,12 +135,15 @@ class State(proto.Enum): Cancelled by the user. FAILED (4): Terminated due to an unrecoverable failure. + QUEUED (5): + Queued but not yet started. """ STATE_UNSPECIFIED = 0 RUNNING = 1 SUCCEEDED = 2 CANCELED = 3 FAILED = 4 + QUEUED = 5 name: str = proto.Field( proto.STRING, @@ -213,6 +222,176 @@ class State(proto.Enum): proto.BOOL, number=22, ) + is_multi_bucket_job: bool = proto.Field( + proto.BOOL, + number=24, + ) + + +class BucketOperation(proto.Message): + r"""BucketOperation represents a bucket-level breakdown of a Job. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Identifier. The resource name of the BucketOperation. This + is defined by the service. Format: + projects/{project}/locations/global/jobs/{job_id}/bucketOperations/{bucket_operation}. + bucket_name (str): + The bucket name of the objects to be + transformed in the BucketOperation. + prefix_list (google.cloud.storagebatchoperations_v1.types.PrefixList): + Specifies objects matching a prefix set. + + This field is a member of `oneof`_ ``object_configuration``. + manifest (google.cloud.storagebatchoperations_v1.types.Manifest): + Specifies objects in a manifest file. + + This field is a member of `oneof`_ ``object_configuration``. + put_object_hold (google.cloud.storagebatchoperations_v1.types.PutObjectHold): + Changes object hold status. + + This field is a member of `oneof`_ ``transformation``. + delete_object (google.cloud.storagebatchoperations_v1.types.DeleteObject): + Delete objects. + + This field is a member of `oneof`_ ``transformation``. + put_metadata (google.cloud.storagebatchoperations_v1.types.PutMetadata): + Updates object metadata. Allows updating + fixed-key and custom metadata and fixed-key + metadata i.e. Cache-Control, + Content-Disposition, Content-Encoding, + Content-Language, Content-Type, Custom-Time. + + This field is a member of `oneof`_ ``transformation``. + rewrite_object (google.cloud.storagebatchoperations_v1.types.RewriteObject): + Rewrite the object and updates metadata like + KMS key. + + This field is a member of `oneof`_ ``transformation``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time that the + BucketOperation was created. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time that the + BucketOperation was started. + complete_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time that the + BucketOperation was completed. 
+ counters (google.cloud.storagebatchoperations_v1.types.Counters): + Output only. Information about the progress + of the bucket operation. + error_summaries (MutableSequence[google.cloud.storagebatchoperations_v1.types.ErrorSummary]): + Output only. Summarizes errors encountered + with sample error log entries. + state (google.cloud.storagebatchoperations_v1.types.BucketOperation.State): + Output only. State of the BucketOperation. + """ + + class State(proto.Enum): + r"""Describes state of the BucketOperation. + + Values: + STATE_UNSPECIFIED (0): + Default value. This value is unused. + QUEUED (1): + Created but not yet started. + RUNNING (2): + In progress. + SUCCEEDED (3): + Completed successfully. + CANCELED (4): + Cancelled by the user. + FAILED (5): + Terminated due to an unrecoverable failure. + """ + STATE_UNSPECIFIED = 0 + QUEUED = 1 + RUNNING = 2 + SUCCEEDED = 3 + CANCELED = 4 + FAILED = 5 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + bucket_name: str = proto.Field( + proto.STRING, + number=2, + ) + prefix_list: "PrefixList" = proto.Field( + proto.MESSAGE, + number=3, + oneof="object_configuration", + message="PrefixList", + ) + manifest: "Manifest" = proto.Field( + proto.MESSAGE, + number=4, + oneof="object_configuration", + message="Manifest", + ) + put_object_hold: "PutObjectHold" = proto.Field( + proto.MESSAGE, + number=11, + oneof="transformation", + message="PutObjectHold", + ) + delete_object: "DeleteObject" = proto.Field( + proto.MESSAGE, + number=12, + oneof="transformation", + message="DeleteObject", + ) + put_metadata: "PutMetadata" = proto.Field( + proto.MESSAGE, + number=13, + oneof="transformation", + message="PutMetadata", + ) + rewrite_object: "RewriteObject" = proto.Field( + proto.MESSAGE, + number=14, + oneof="transformation", + message="RewriteObject", + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + start_time: timestamp_pb2.Timestamp = 
proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + complete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + counters: "Counters" = proto.Field( + proto.MESSAGE, + number=8, + message="Counters", + ) + error_summaries: MutableSequence["ErrorSummary"] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message="ErrorSummary", + ) + state: State = proto.Field( + proto.ENUM, + number=10, + enum=State, + ) class BucketList(proto.Message): diff --git a/packages/google-cloud-storagebatchoperations/samples/generated_samples/snippet_metadata_google.cloud.storagebatchoperations.v1.json b/packages/google-cloud-storagebatchoperations/samples/generated_samples/snippet_metadata_google.cloud.storagebatchoperations.v1.json index d14e13dfe937..51a1123fadc6 100644 --- a/packages/google-cloud-storagebatchoperations/samples/generated_samples/snippet_metadata_google.cloud.storagebatchoperations.v1.json +++ b/packages/google-cloud-storagebatchoperations/samples/generated_samples/snippet_metadata_google.cloud.storagebatchoperations.v1.json @@ -504,6 +504,167 @@ ], "title": "storagebatchoperations_v1_generated_storage_batch_operations_delete_job_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient", + "shortName": "StorageBatchOperationsAsyncClient" + }, + "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient.get_bucket_operation", + "method": { + "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations.GetBucketOperation", + "service": { + "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", + "shortName": "StorageBatchOperations" + }, + "shortName": "GetBucketOperation" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.storagebatchoperations_v1.types.GetBucketOperationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.storagebatchoperations_v1.types.BucketOperation", + "shortName": "get_bucket_operation" + }, + "description": "Sample for GetBucketOperation", + "file": "storagebatchoperations_v1_generated_storage_batch_operations_get_bucket_operation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "storagebatchoperations_v1_generated_StorageBatchOperations_GetBucketOperation_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "storagebatchoperations_v1_generated_storage_batch_operations_get_bucket_operation_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient", + "shortName": "StorageBatchOperationsClient" + }, + "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.get_bucket_operation", + "method": { + "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations.GetBucketOperation", + "service": { + "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", + "shortName": "StorageBatchOperations" + }, + "shortName": "GetBucketOperation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.storagebatchoperations_v1.types.GetBucketOperationRequest" + }, + { 
+ "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.storagebatchoperations_v1.types.BucketOperation", + "shortName": "get_bucket_operation" + }, + "description": "Sample for GetBucketOperation", + "file": "storagebatchoperations_v1_generated_storage_batch_operations_get_bucket_operation_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "storagebatchoperations_v1_generated_StorageBatchOperations_GetBucketOperation_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "storagebatchoperations_v1_generated_storage_batch_operations_get_bucket_operation_sync.py" + }, { "canonical": true, "clientMethod": { @@ -665,6 +826,167 @@ ], "title": "storagebatchoperations_v1_generated_storage_batch_operations_get_job_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient", + "shortName": "StorageBatchOperationsAsyncClient" + }, + "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient.list_bucket_operations", + "method": { + "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations.ListBucketOperations", + "service": { + "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", + "shortName": "StorageBatchOperations" + }, + "shortName": "ListBucketOperations" + }, + "parameters": [ + { + 
"name": "request", + "type": "google.cloud.storagebatchoperations_v1.types.ListBucketOperationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.storagebatchoperations_v1.services.storage_batch_operations.pagers.ListBucketOperationsAsyncPager", + "shortName": "list_bucket_operations" + }, + "description": "Sample for ListBucketOperations", + "file": "storagebatchoperations_v1_generated_storage_batch_operations_list_bucket_operations_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "storagebatchoperations_v1_generated_StorageBatchOperations_ListBucketOperations_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "storagebatchoperations_v1_generated_storage_batch_operations_list_bucket_operations_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient", + "shortName": "StorageBatchOperationsClient" + }, + "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.list_bucket_operations", + "method": { + "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations.ListBucketOperations", + "service": { + "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", + "shortName": "StorageBatchOperations" + }, + "shortName": "ListBucketOperations" + }, + "parameters": [ + { + "name": 
"request", + "type": "google.cloud.storagebatchoperations_v1.types.ListBucketOperationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.storagebatchoperations_v1.services.storage_batch_operations.pagers.ListBucketOperationsPager", + "shortName": "list_bucket_operations" + }, + "description": "Sample for ListBucketOperations", + "file": "storagebatchoperations_v1_generated_storage_batch_operations_list_bucket_operations_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "storagebatchoperations_v1_generated_StorageBatchOperations_ListBucketOperations_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "storagebatchoperations_v1_generated_storage_batch_operations_list_bucket_operations_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-storagebatchoperations/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_get_bucket_operation_async.py b/packages/google-cloud-storagebatchoperations/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_get_bucket_operation_async.py new file mode 100644 index 000000000000..0b2850c9f2ac --- /dev/null +++ b/packages/google-cloud-storagebatchoperations/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_get_bucket_operation_async.py @@ -0,0 +1,53 @@ +# -*- coding: 
utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBucketOperation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-storagebatchoperations + + +# [START storagebatchoperations_v1_generated_StorageBatchOperations_GetBucketOperation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import storagebatchoperations_v1 + + +async def sample_get_bucket_operation(): + # Create a client + client = storagebatchoperations_v1.StorageBatchOperationsAsyncClient() + + # Initialize request argument(s) + request = storagebatchoperations_v1.GetBucketOperationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_bucket_operation(request=request) + + # Handle the response + print(response) + + +# [END storagebatchoperations_v1_generated_StorageBatchOperations_GetBucketOperation_async] diff --git a/packages/google-cloud-storagebatchoperations/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_get_bucket_operation_sync.py b/packages/google-cloud-storagebatchoperations/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_get_bucket_operation_sync.py new file mode 100644 index 000000000000..9c1810d24d4c --- /dev/null +++ b/packages/google-cloud-storagebatchoperations/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_get_bucket_operation_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetBucketOperation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-storagebatchoperations + + +# [START storagebatchoperations_v1_generated_StorageBatchOperations_GetBucketOperation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import storagebatchoperations_v1 + + +def sample_get_bucket_operation(): + # Create a client + client = storagebatchoperations_v1.StorageBatchOperationsClient() + + # Initialize request argument(s) + request = storagebatchoperations_v1.GetBucketOperationRequest( + name="name_value", + ) + + # Make the request + response = client.get_bucket_operation(request=request) + + # Handle the response + print(response) + + +# [END storagebatchoperations_v1_generated_StorageBatchOperations_GetBucketOperation_sync] diff --git a/packages/google-cloud-storagebatchoperations/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_list_bucket_operations_async.py b/packages/google-cloud-storagebatchoperations/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_list_bucket_operations_async.py new file mode 100644 index 000000000000..e52e39208626 --- /dev/null +++ b/packages/google-cloud-storagebatchoperations/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_list_bucket_operations_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# 
Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBucketOperations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-storagebatchoperations + + +# [START storagebatchoperations_v1_generated_StorageBatchOperations_ListBucketOperations_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import storagebatchoperations_v1 + + +async def sample_list_bucket_operations(): + # Create a client + client = storagebatchoperations_v1.StorageBatchOperationsAsyncClient() + + # Initialize request argument(s) + request = storagebatchoperations_v1.ListBucketOperationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_bucket_operations(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END storagebatchoperations_v1_generated_StorageBatchOperations_ListBucketOperations_async] diff --git a/packages/google-cloud-storagebatchoperations/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_list_bucket_operations_sync.py b/packages/google-cloud-storagebatchoperations/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_list_bucket_operations_sync.py new file mode 100644 index 000000000000..dd05bca02d34 --- /dev/null +++ b/packages/google-cloud-storagebatchoperations/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_list_bucket_operations_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
def sample_list_bucket_operations():
    """Demonstrate listing bucket operations with the synchronous client."""
    # Instantiate the synchronous service client.
    sync_client = storagebatchoperations_v1.StorageBatchOperationsClient()

    # Build the list request; ``parent`` is the only required field.
    list_request = storagebatchoperations_v1.ListBucketOperationsRequest(
        parent="parent_value",
    )

    # The call returns a pager that fetches pages transparently.
    pager = sync_client.list_bucket_operations(request=list_request)

    # Walk every bucket operation across all pages and print it.
    for bucket_operation in pager:
        print(bucket_operation)
@pytest.mark.parametrize(
    "request_type",
    [
        storage_batch_operations.ListBucketOperationsRequest,
        dict,
    ],
)
def test_list_bucket_operations(request_type, transport: str = "grpc"):
    """Happy-path gRPC test: the stub is invoked once with an (empty)
    ListBucketOperationsRequest and the response is wrapped in a
    ListBucketOperationsPager exposing the proto fields.

    Parametrized over both the proto request class and a plain dict, since
    the client accepts either form.
    """
    client = StorageBatchOperationsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_bucket_operations), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = storage_batch_operations.ListBucketOperationsResponse(
            next_page_token="next_page_token_value",
            unreachable=["unreachable_value"],
        )
        response = client.list_bucket_operations(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        # An empty dict and an empty proto request must both normalize to a
        # default-constructed proto request.
        request = storage_batch_operations.ListBucketOperationsRequest()
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListBucketOperationsPager)
    assert response.next_page_token == "next_page_token_value"
    assert response.unreachable == ["unreachable_value"]
def test_list_bucket_operations_use_cached_wrapped_rpc():
    """Verify the client caches its wrapped RPC in _wrapped_methods at
    construction time and reuses it on every call instead of re-wrapping.

    Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    instead of constructing them on each call.
    """
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = StorageBatchOperationsClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="grpc",
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert (
            client._transport.list_bucket_operations
            in client._transport._wrapped_methods
        )

        # Replace cached wrapped function with mock
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = (
            "foo"  # operation_request.operation in compute client(s) expect a string.
        )
        client._transport._wrapped_methods[
            client._transport.list_bucket_operations
        ] = mock_rpc
        request = {}
        client.list_bucket_operations(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        client.list_bucket_operations(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2
def test_list_bucket_operations_field_headers():
    """Verify routing metadata: the ``parent`` field must be echoed in the
    ``x-goog-request-params`` header so the backend can route the request."""
    client = StorageBatchOperationsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = storage_batch_operations.ListBucketOperationsRequest()

    request.parent = "parent_value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_bucket_operations), "__call__"
    ) as call:
        call.return_value = storage_batch_operations.ListBucketOperationsResponse()
        client.list_bucket_operations(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        "x-goog-request-params",
        "parent=parent_value",
    ) in kw["metadata"]
@pytest.mark.asyncio
async def test_list_bucket_operations_flattened_async():
    """Flattened-argument form (async): ``parent=`` must be folded into the
    request proto passed to the transport.

    Fix: the generated test assigned a plain ListBucketOperationsResponse to
    ``call.return_value`` and then immediately overwrote it with the awaited
    FakeUnaryUnaryCall; the first assignment was dead code and is removed.
    """
    client = StorageBatchOperationsAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_bucket_operations), "__call__"
    ) as call:
        # Designate the awaitable return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            storage_batch_operations.ListBucketOperationsResponse()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.list_bucket_operations(
            parent="parent_value",
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val
def test_list_bucket_operations_pages(transport_name: str = "grpc"):
    """Verify the pager exposes raw pages in order: four faked pages whose
    next_page_token values are 'abc', 'def', 'ghi', and '' (last page)."""
    client = StorageBatchOperationsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport_name,
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_bucket_operations), "__call__"
    ) as call:
        # Set the response to a series of pages.  The trailing RuntimeError
        # guards against the pager requesting a page past the last one.
        call.side_effect = (
            storage_batch_operations.ListBucketOperationsResponse(
                bucket_operations=[
                    storage_batch_operations_types.BucketOperation(),
                    storage_batch_operations_types.BucketOperation(),
                    storage_batch_operations_types.BucketOperation(),
                ],
                next_page_token="abc",
            ),
            storage_batch_operations.ListBucketOperationsResponse(
                bucket_operations=[],
                next_page_token="def",
            ),
            storage_batch_operations.ListBucketOperationsResponse(
                bucket_operations=[
                    storage_batch_operations_types.BucketOperation(),
                ],
                next_page_token="ghi",
            ),
            storage_batch_operations.ListBucketOperationsResponse(
                bucket_operations=[
                    storage_batch_operations_types.BucketOperation(),
                    storage_batch_operations_types.BucketOperation(),
                ],
            ),
            RuntimeError,
        )
        pages = list(client.list_bucket_operations(request={}).pages)
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
+ with mock.patch.object( + type(client.transport.list_bucket_operations), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + storage_batch_operations.ListBucketOperationsResponse( + bucket_operations=[ + storage_batch_operations_types.BucketOperation(), + storage_batch_operations_types.BucketOperation(), + storage_batch_operations_types.BucketOperation(), + ], + next_page_token="abc", + ), + storage_batch_operations.ListBucketOperationsResponse( + bucket_operations=[], + next_page_token="def", + ), + storage_batch_operations.ListBucketOperationsResponse( + bucket_operations=[ + storage_batch_operations_types.BucketOperation(), + ], + next_page_token="ghi", + ), + storage_batch_operations.ListBucketOperationsResponse( + bucket_operations=[ + storage_batch_operations_types.BucketOperation(), + storage_batch_operations_types.BucketOperation(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_bucket_operations( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, storage_batch_operations_types.BucketOperation) + for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_bucket_operations_async_pages(): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_bucket_operations), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + storage_batch_operations.ListBucketOperationsResponse( + bucket_operations=[ + storage_batch_operations_types.BucketOperation(), + storage_batch_operations_types.BucketOperation(), + storage_batch_operations_types.BucketOperation(), + ], + next_page_token="abc", + ), + storage_batch_operations.ListBucketOperationsResponse( + bucket_operations=[], + next_page_token="def", + ), + storage_batch_operations.ListBucketOperationsResponse( + bucket_operations=[ + storage_batch_operations_types.BucketOperation(), + ], + next_page_token="ghi", + ), + storage_batch_operations.ListBucketOperationsResponse( + bucket_operations=[ + storage_batch_operations_types.BucketOperation(), + storage_batch_operations_types.BucketOperation(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_bucket_operations(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + storage_batch_operations.GetBucketOperationRequest, + dict, + ], +) +def test_get_bucket_operation(request_type, transport: str = "grpc"): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_bucket_operation), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
def test_get_bucket_operation_non_empty_request_with_auto_populated_field():
    """Verify that explicitly-set, non-UUID4 string fields survive request
    auto-population untouched (coverage failsafe for AIP-4235)."""
    # This test is a coverage failsafe to make sure that UUID4 fields are
    # automatically populated, according to AIP-4235, with non-empty requests.
    client = StorageBatchOperationsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Populate all string fields in the request which are not UUID4
    # since we want to check that UUID4 are populated automatically
    # if they meet the requirements of AIP 4235.
    request = storage_batch_operations.GetBucketOperationRequest(
        name="name_value",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_bucket_operation), "__call__"
    ) as call:
        call.return_value.name = (
            "foo"  # operation_request.operation in compute client(s) expect a string.
        )
        client.get_bucket_operation(request=request)
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == storage_batch_operations.GetBucketOperationRequest(
            name="name_value",
        )
@pytest.mark.asyncio
async def test_get_bucket_operation_async(
    transport: str = "grpc_asyncio",
    request_type=storage_batch_operations.GetBucketOperationRequest,
):
    """Happy-path async gRPC test: the stub is awaited once and the awaited
    result is surfaced as a BucketOperation with its proto fields intact."""
    client = StorageBatchOperationsAsyncClient(
        credentials=async_anonymous_credentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_bucket_operation), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            storage_batch_operations_types.BucketOperation(
                name="name_value",
                bucket_name="bucket_name_value",
                state=storage_batch_operations_types.BucketOperation.State.QUEUED,
            )
        )
        response = await client.get_bucket_operation(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        request = storage_batch_operations.GetBucketOperationRequest()
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, storage_batch_operations_types.BucketOperation)
    assert response.name == "name_value"
    assert response.bucket_name == "bucket_name_value"
    assert response.state == storage_batch_operations_types.BucketOperation.State.QUEUED
@pytest.mark.asyncio
async def test_get_bucket_operation_field_headers_async():
    """Verify routing metadata (async): the ``name`` field must be echoed in
    the ``x-goog-request-params`` header so the backend can route the call."""
    client = StorageBatchOperationsAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = storage_batch_operations.GetBucketOperationRequest()

    request.name = "name_value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_bucket_operation), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            storage_batch_operations_types.BucketOperation()
        )
        await client.get_bucket_operation(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        "x-goog-request-params",
        "name=name_value",
    ) in kw["metadata"]
@pytest.mark.asyncio
async def test_get_bucket_operation_flattened_async():
    """Flattened-argument form (async): ``name=`` must be folded into the
    request proto passed to the transport.

    Fix: the generated test assigned a plain BucketOperation to
    ``call.return_value`` and then immediately overwrote it with the awaited
    FakeUnaryUnaryCall; the first assignment was dead code and is removed.
    """
    client = StorageBatchOperationsAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_bucket_operation), "__call__"
    ) as call:
        # Designate the awaitable return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            storage_batch_operations_types.BucketOperation()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.get_bucket_operation(
            name="name_value",
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val
+ with pytest.raises(ValueError): + await client.get_bucket_operation( + storage_batch_operations.GetBucketOperationRequest(), + name="name_value", + ) + + def test_list_jobs_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -3235,35 +4150,476 @@ def test_list_jobs_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_jobs in client._transport._wrapped_methods + assert client._transport.list_jobs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_jobs] = mock_rpc + + request = {} + client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_jobs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_jobs_rest_required_fields( + request_type=storage_batch_operations.ListJobsRequest, +): + transport_class = transports.StorageBatchOperationsRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_jobs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + 
credentials=ga_credentials.AnonymousCredentials() + ).list_jobs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = storage_batch_operations.ListJobsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = storage_batch_operations.ListJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_jobs(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_jobs_rest_unset_required_fields(): + transport = transports.StorageBatchOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_jobs._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_jobs_rest_flattened(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = storage_batch_operations.ListJobsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = storage_batch_operations.ListJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_jobs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/jobs" % client.transport._host, + args[1], + ) + + +def test_list_jobs_rest_flattened_error(transport: str = "rest"): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_jobs( + storage_batch_operations.ListJobsRequest(), + parent="parent_value", + ) + + +def test_list_jobs_rest_pager(transport: str = "rest"): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + storage_batch_operations.ListJobsResponse( + jobs=[ + storage_batch_operations_types.Job(), + storage_batch_operations_types.Job(), + storage_batch_operations_types.Job(), + ], + next_page_token="abc", + ), + storage_batch_operations.ListJobsResponse( + jobs=[], + next_page_token="def", + ), + storage_batch_operations.ListJobsResponse( + jobs=[ + storage_batch_operations_types.Job(), + ], + next_page_token="ghi", + ), + storage_batch_operations.ListJobsResponse( + jobs=[ + storage_batch_operations_types.Job(), + storage_batch_operations_types.Job(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + storage_batch_operations.ListJobsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_jobs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, storage_batch_operations_types.Job) for i in results) + + pages = list(client.list_jobs(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_job_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + 
assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_job in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_jobs] = mock_rpc + client._transport._wrapped_methods[client._transport.get_job] = mock_rpc request = {} - client.list_jobs(request) + client.get_job(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_jobs(request) + client.get_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_jobs_rest_required_fields( - request_type=storage_batch_operations.ListJobsRequest, +def test_get_job_rest_required_fields( + request_type=storage_batch_operations.GetJobRequest, +): + transport_class = transports.StorageBatchOperationsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert 
jsonified_request["name"] == "name_value" + + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = storage_batch_operations_types.Job() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = storage_batch_operations_types.Job.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_job_rest_unset_required_fields(): + transport = transports.StorageBatchOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_job_rest_flattened(): + client = StorageBatchOperationsClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = storage_batch_operations_types.Job() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = storage_batch_operations_types.Job.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/jobs/*}" % client.transport._host, + args[1], + ) + + +def test_get_job_rest_flattened_error(transport: str = "rest"): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_job( + storage_batch_operations.GetJobRequest(), + name="name_value", + ) + + +def test_create_job_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_job] = mock_rpc + + request = {} + client.create_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_job_rest_required_fields( + request_type=storage_batch_operations.CreateJobRequest, ): transport_class = transports.StorageBatchOperationsRestTransport request_init = {} request_init["parent"] = "" + request_init["job_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -3271,26 +4627,28 @@ def test_list_jobs_rest_required_fields( ) # verify fields with default values are dropped + assert "jobId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_jobs._get_unset_required_fields(jsonified_request) + ).create_job._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "jobId" in jsonified_request + assert jsonified_request["jobId"] == request_init["job_id"] jsonified_request["parent"] = "parent_value" + jsonified_request["jobId"] = "job_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_jobs._get_unset_required_fields(jsonified_request) + ).create_job._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "filter", - "order_by", - "page_size", - "page_token", + "job_id", + "request_id", ) ) jsonified_request.update(unset_fields) @@ -3298,6 +4656,8 @@ def test_list_jobs_rest_required_fields( # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "jobId" in jsonified_request + assert jsonified_request["jobId"] == "job_id_value" client = StorageBatchOperationsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3306,7 +4666,7 @@ def test_list_jobs_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = storage_batch_operations.ListJobsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -3318,49 +4678,57 @@ def test_list_jobs_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = storage_batch_operations.ListJobsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_jobs(request) + response = client.create_job(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "jobId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = 
req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_jobs_rest_unset_required_fields(): +def test_create_job_rest_unset_required_fields(): transport = transports.StorageBatchOperationsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_jobs._get_unset_required_fields({}) + unset_fields = transport.create_job._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "jobId", + "requestId", + ) + ) + & set( + ( + "parent", + "jobId", + "job", ) ) - & set(("parent",)) ) -def test_list_jobs_rest_flattened(): +def test_create_job_rest_flattened(): client = StorageBatchOperationsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3369,7 +4737,7 @@ def test_list_jobs_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = storage_batch_operations.ListJobsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -3377,20 +4745,20 @@ def test_list_jobs_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", + job=storage_batch_operations_types.Job(name="name_value"), + job_id="job_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = storage_batch_operations.ListJobsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_jobs(**mock_args) + client.create_job(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -3402,7 +4770,7 @@ def test_list_jobs_rest_flattened(): ) -def test_list_jobs_rest_flattened_error(transport: str = "rest"): +def test_create_job_rest_flattened_error(transport: str = "rest"): client = StorageBatchOperationsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3411,76 +4779,15 @@ def test_list_jobs_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_jobs( - storage_batch_operations.ListJobsRequest(), + client.create_job( + storage_batch_operations.CreateJobRequest(), parent="parent_value", + job=storage_batch_operations_types.Job(name="name_value"), + job_id="job_id_value", ) -def test_list_jobs_rest_pager(transport: str = "rest"): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - storage_batch_operations.ListJobsResponse( - jobs=[ - storage_batch_operations_types.Job(), - storage_batch_operations_types.Job(), - storage_batch_operations_types.Job(), - ], - next_page_token="abc", - ), - storage_batch_operations.ListJobsResponse( - jobs=[], - next_page_token="def", - ), - storage_batch_operations.ListJobsResponse( - jobs=[ - storage_batch_operations_types.Job(), - ], - next_page_token="ghi", - ), - storage_batch_operations.ListJobsResponse( - jobs=[ - storage_batch_operations_types.Job(), - storage_batch_operations_types.Job(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - storage_batch_operations.ListJobsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_jobs(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, 
storage_batch_operations_types.Job) for i in results) - - pages = list(client.list_jobs(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_get_job_rest_use_cached_wrapped_rpc(): +def test_delete_job_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3494,30 +4801,30 @@ def test_get_job_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_job in client._transport._wrapped_methods + assert client._transport.delete_job in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_job] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_job] = mock_rpc request = {} - client.get_job(request) + client.delete_job(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_job(request) + client.delete_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_job_rest_required_fields( - request_type=storage_batch_operations.GetJobRequest, +def test_delete_job_rest_required_fields( + request_type=storage_batch_operations.DeleteJobRequest, ): transport_class = transports.StorageBatchOperationsRestTransport @@ -3533,7 +4840,7 @@ def test_get_job_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_job._get_unset_required_fields(jsonified_request) + ).delete_job._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -3542,7 +4849,14 @@ def test_get_job_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_job._get_unset_required_fields(jsonified_request) + ).delete_job._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "force", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -3556,7 +4870,7 @@ def test_get_job_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = storage_batch_operations_types.Job() + return_value = None # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -3568,39 +4882,44 @@ def test_get_job_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = storage_batch_operations_types.Job.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_job(request) + response = client.delete_job(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_job_rest_unset_required_fields(): +def test_delete_job_rest_unset_required_fields(): transport = transports.StorageBatchOperationsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.delete_job._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "force", + "requestId", + ) + ) + & set(("name",)) + ) -def test_get_job_rest_flattened(): +def test_delete_job_rest_flattened(): client = StorageBatchOperationsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3609,7 +4928,7 @@ def test_get_job_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = storage_batch_operations_types.Job() + return_value = None # get arguments that satisfy an http rule for this method sample_request = {"name": "projects/sample1/locations/sample2/jobs/sample3"} @@ -3623,14 +4942,12 @@ def test_get_job_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = storage_batch_operations_types.Job.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_job(**mock_args) + client.delete_job(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -3642,7 +4959,7 @@ def test_get_job_rest_flattened(): ) -def test_get_job_rest_flattened_error(transport: str = "rest"): +def test_delete_job_rest_flattened_error(transport: str = "rest"): client = StorageBatchOperationsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3651,13 +4968,13 @@ def test_get_job_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_job( - storage_batch_operations.GetJobRequest(), + client.delete_job( + storage_batch_operations.DeleteJobRequest(), name="name_value", ) -def test_create_job_rest_use_cached_wrapped_rpc(): +def test_cancel_job_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3671,40 +4988,35 @@ def test_create_job_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_job in client._transport._wrapped_methods + assert client._transport.cancel_job in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_job] = mock_rpc + client._transport._wrapped_methods[client._transport.cancel_job] = mock_rpc request = {} - client.create_job(request) + client.cancel_job(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_job(request) + client.cancel_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_job_rest_required_fields( - request_type=storage_batch_operations.CreateJobRequest, +def test_cancel_job_rest_required_fields( + request_type=storage_batch_operations.CancelJobRequest, ): transport_class = transports.StorageBatchOperationsRestTransport request_init = {} - request_init["parent"] = "" - request_init["job_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -3712,37 +5024,24 @@ def test_create_job_rest_required_fields( ) # verify fields with default values are dropped - assert "jobId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_job._get_unset_required_fields(jsonified_request) + ).cancel_job._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "jobId" in jsonified_request - assert jsonified_request["jobId"] == request_init["job_id"] - jsonified_request["parent"] = "parent_value" - jsonified_request["jobId"] = "job_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_job._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "job_id", - "request_id", - ) - ) + ).cancel_job._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "jobId" in jsonified_request - assert jsonified_request["jobId"] == "job_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = StorageBatchOperationsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3751,7 +5050,7 @@ def test_create_job_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = storage_batch_operations.CancelJobResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -3771,49 +5070,32 @@ def test_create_job_rest_required_fields( response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = storage_batch_operations.CancelJobResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_job(request) + response = client.cancel_job(request) - expected_params = [ - ( - "jobId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_job_rest_unset_required_fields(): +def test_cancel_job_rest_unset_required_fields(): transport = 
transports.StorageBatchOperationsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_job._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "jobId", - "requestId", - ) - ) - & set( - ( - "parent", - "jobId", - "job", - ) - ) - ) + unset_fields = transport.cancel_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_create_job_rest_flattened(): +def test_cancel_job_rest_flattened(): client = StorageBatchOperationsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3822,40 +5104,41 @@ def test_create_job_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = storage_batch_operations.CancelJobResponse() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = {"name": "projects/sample1/locations/sample2/jobs/sample3"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - job=storage_batch_operations_types.Job(name="name_value"), - job_id="job_id_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = storage_batch_operations.CancelJobResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_job(**mock_args) + client.cancel_job(**mock_args) # Establish that the underlying call was 
made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/jobs" % client.transport._host, + "%s/v1/{name=projects/*/locations/*/jobs/*}:cancel" + % client.transport._host, args[1], ) -def test_create_job_rest_flattened_error(transport: str = "rest"): +def test_cancel_job_rest_flattened_error(transport: str = "rest"): client = StorageBatchOperationsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3864,15 +5147,13 @@ def test_create_job_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_job( - storage_batch_operations.CreateJobRequest(), - parent="parent_value", - job=storage_batch_operations_types.Job(name="name_value"), - job_id="job_id_value", + client.cancel_job( + storage_batch_operations.CancelJobRequest(), + name="name_value", ) -def test_delete_job_rest_use_cached_wrapped_rpc(): +def test_list_bucket_operations_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3886,35 +5167,40 @@ def test_delete_job_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_job in client._transport._wrapped_methods + assert ( + client._transport.list_bucket_operations + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.delete_job] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_bucket_operations + ] = mock_rpc request = {} - client.delete_job(request) + client.list_bucket_operations(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_job(request) + client.list_bucket_operations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_job_rest_required_fields( - request_type=storage_batch_operations.DeleteJobRequest, +def test_list_bucket_operations_rest_required_fields( + request_type=storage_batch_operations.ListBucketOperationsRequest, ): transport_class = transports.StorageBatchOperationsRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -3925,23 +5211,30 @@ def test_delete_job_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_job._get_unset_required_fields(jsonified_request) + ).list_bucket_operations._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_job._get_unset_required_fields(jsonified_request) + ).list_bucket_operations._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("request_id",)) + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = StorageBatchOperationsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3950,7 +5243,7 @@ def test_delete_job_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = storage_batch_operations.ListBucketOperationsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -3962,36 +5255,51 @@ def test_delete_job_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = storage_batch_operations.ListBucketOperationsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_job(request) + response = client.list_bucket_operations(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_job_rest_unset_required_fields(): +def 
test_list_bucket_operations_rest_unset_required_fields(): transport = transports.StorageBatchOperationsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + unset_fields = transport.list_bucket_operations._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -def test_delete_job_rest_flattened(): +def test_list_bucket_operations_rest_flattened(): client = StorageBatchOperationsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4000,38 +5308,43 @@ def test_delete_job_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = storage_batch_operations.ListBucketOperationsResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + sample_request = {"parent": "projects/sample1/locations/sample2/jobs/sample3"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = storage_batch_operations.ListBucketOperationsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_job(**mock_args) + 
client.list_bucket_operations(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/jobs/*}" % client.transport._host, + "%s/v1/{parent=projects/*/locations/*/jobs/*}/bucketOperations" + % client.transport._host, args[1], ) -def test_delete_job_rest_flattened_error(transport: str = "rest"): +def test_list_bucket_operations_rest_flattened_error(transport: str = "rest"): client = StorageBatchOperationsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4040,13 +5353,80 @@ def test_delete_job_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_job( - storage_batch_operations.DeleteJobRequest(), - name="name_value", + client.list_bucket_operations( + storage_batch_operations.ListBucketOperationsRequest(), + parent="parent_value", ) -def test_cancel_job_rest_use_cached_wrapped_rpc(): +def test_list_bucket_operations_rest_pager(transport: str = "rest"): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + storage_batch_operations.ListBucketOperationsResponse( + bucket_operations=[ + storage_batch_operations_types.BucketOperation(), + storage_batch_operations_types.BucketOperation(), + storage_batch_operations_types.BucketOperation(), + ], + next_page_token="abc", + ), + storage_batch_operations.ListBucketOperationsResponse( + bucket_operations=[], + next_page_token="def", + ), + storage_batch_operations.ListBucketOperationsResponse( + bucket_operations=[ + storage_batch_operations_types.BucketOperation(), + ], + next_page_token="ghi", + ), + storage_batch_operations.ListBucketOperationsResponse( + bucket_operations=[ + storage_batch_operations_types.BucketOperation(), + storage_batch_operations_types.BucketOperation(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + storage_batch_operations.ListBucketOperationsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2/jobs/sample3"} + + pager = client.list_bucket_operations(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, storage_batch_operations_types.BucketOperation) + for i in results + ) + + pages = list(client.list_bucket_operations(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_bucket_operation_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on 
each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4060,30 +5440,34 @@ def test_cancel_job_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.cancel_job in client._transport._wrapped_methods + assert ( + client._transport.get_bucket_operation in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.cancel_job] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_bucket_operation + ] = mock_rpc request = {} - client.cancel_job(request) + client.get_bucket_operation(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.cancel_job(request) + client.get_bucket_operation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_cancel_job_rest_required_fields( - request_type=storage_batch_operations.CancelJobRequest, +def test_get_bucket_operation_rest_required_fields( + request_type=storage_batch_operations.GetBucketOperationRequest, ): transport_class = transports.StorageBatchOperationsRestTransport @@ -4099,7 +5483,7 @@ def test_cancel_job_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).cancel_job._get_unset_required_fields(jsonified_request) + ).get_bucket_operation._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -4108,7 +5492,7 @@ def test_cancel_job_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).cancel_job._get_unset_required_fields(jsonified_request) + 
).get_bucket_operation._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -4122,7 +5506,7 @@ def test_cancel_job_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = storage_batch_operations.CancelJobResponse() + return_value = storage_batch_operations_types.BucketOperation() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -4134,40 +5518,41 @@ def test_cancel_job_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = storage_batch_operations.CancelJobResponse.pb(return_value) + return_value = storage_batch_operations_types.BucketOperation.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.cancel_job(request) + response = client.get_bucket_operation(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_cancel_job_rest_unset_required_fields(): +def test_get_bucket_operation_rest_unset_required_fields(): transport = transports.StorageBatchOperationsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.cancel_job._get_unset_required_fields({}) + unset_fields = 
transport.get_bucket_operation._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_cancel_job_rest_flattened(): +def test_get_bucket_operation_rest_flattened(): client = StorageBatchOperationsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4176,10 +5561,12 @@ def test_cancel_job_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = storage_batch_operations.CancelJobResponse() + return_value = storage_batch_operations_types.BucketOperation() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + sample_request = { + "name": "projects/sample1/locations/sample2/jobs/sample3/bucketOperations/sample4" + } # get truthy value for each flattened field mock_args = dict( @@ -4191,26 +5578,26 @@ def test_cancel_job_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = storage_batch_operations.CancelJobResponse.pb(return_value) + return_value = storage_batch_operations_types.BucketOperation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_job(**mock_args) + client.get_bucket_operation(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/jobs/*}:cancel" + "%s/v1/{name=projects/*/locations/*/jobs/*/bucketOperations/*}" % client.transport._host, args[1], ) -def test_cancel_job_rest_flattened_error(transport: str = "rest"): +def test_get_bucket_operation_rest_flattened_error(transport: str = "rest"): client = StorageBatchOperationsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4219,8 +5606,8 @@ def test_cancel_job_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.cancel_job( - storage_batch_operations.CancelJobRequest(), + client.get_bucket_operation( + storage_batch_operations.GetBucketOperationRequest(), name="name_value", ) @@ -4436,6 +5823,52 @@ def test_cancel_job_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_bucket_operations_empty_call_grpc(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_bucket_operations), "__call__" + ) as call: + call.return_value = storage_batch_operations.ListBucketOperationsResponse() + client.list_bucket_operations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = storage_batch_operations.ListBucketOperationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_bucket_operation_empty_call_grpc(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_bucket_operation), "__call__" + ) as call: + call.return_value = storage_batch_operations_types.BucketOperation() + client.get_bucket_operation(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = storage_batch_operations.GetBucketOperationRequest() + + assert args[0] == request_msg + + def test_transport_kind_grpc_asyncio(): transport = StorageBatchOperationsAsyncClient.get_transport_class("grpc_asyncio")( credentials=async_anonymous_credentials() @@ -4496,6 +5929,7 @@ async def test_get_job_empty_call_grpc_asyncio(): description="description_value", state=storage_batch_operations_types.Job.State.RUNNING, dry_run=True, + is_multi_bucket_job=True, ) ) await client.get_job(request=None) @@ -4551,7 +5985,62 @@ async def test_delete_job_empty_call_grpc_asyncio(): # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = storage_batch_operations.DeleteJobRequest() + request_msg = storage_batch_operations.DeleteJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_cancel_job_empty_call_grpc_asyncio(): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + storage_batch_operations.CancelJobResponse() + ) + await client.cancel_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = storage_batch_operations.CancelJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_bucket_operations_empty_call_grpc_asyncio(): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_bucket_operations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + storage_batch_operations.ListBucketOperationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_bucket_operations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = storage_batch_operations.ListBucketOperationsRequest() assert args[0] == request_msg @@ -4559,24 +6048,30 @@ async def test_delete_job_empty_call_grpc_asyncio(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_cancel_job_empty_call_grpc_asyncio(): +async def test_get_bucket_operation_empty_call_grpc_asyncio(): client = StorageBatchOperationsAsyncClient( credentials=async_anonymous_credentials(), transport="grpc_asyncio", ) # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: + with mock.patch.object( + type(client.transport.get_bucket_operation), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - storage_batch_operations.CancelJobResponse() + storage_batch_operations_types.BucketOperation( + name="name_value", + bucket_name="bucket_name_value", + state=storage_batch_operations_types.BucketOperation.State.QUEUED, + ) ) - await client.cancel_job(request=None) + await client.get_bucket_operation(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = storage_batch_operations.CancelJobRequest() + request_msg = storage_batch_operations.GetBucketOperationRequest() assert args[0] == request_msg @@ -4769,6 +6264,7 @@ def test_get_job_rest_call_success(request_type): description="description_value", state=storage_batch_operations_types.Job.State.RUNNING, dry_run=True, + is_multi_bucket_job=True, ) # Wrap the value into a proper Response obj @@ -4789,6 +6285,7 @@ def test_get_job_rest_call_success(request_type): assert response.description == "description_value" assert response.state == storage_batch_operations_types.Job.State.RUNNING assert response.dry_run is True + assert response.is_multi_bucket_job is True @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -4954,6 +6451,7 @@ def test_create_job_rest_call_success(request_type): ], "state": 1, "dry_run": True, + "is_multi_bucket_job": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -4968,82 +6466,318 @@ def get_message_fields(field): # If the field is not a composite type, return an empty list. 
message_fields = [] - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["job"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, 
len(request_init["job"][field])): + del request_init["job"][field][i][subfield] + else: + del request_init["job"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_job(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_job_rest_interceptors(null_interceptor): + transport = transports.StorageBatchOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.StorageBatchOperationsRestInterceptor(), + ) + client = StorageBatchOperationsClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.StorageBatchOperationsRestInterceptor, "post_create_job" + ) as post, mock.patch.object( + transports.StorageBatchOperationsRestInterceptor, + "post_create_job_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.StorageBatchOperationsRestInterceptor, "pre_create_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + 
post_with_metadata.assert_not_called() + pb_message = storage_batch_operations.CreateJobRequest.pb( + storage_batch_operations.CreateJobRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = storage_batch_operations.CreateJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_job_rest_bad_request( + request_type=storage_batch_operations.DeleteJobRequest, +): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_job(request) + + +@pytest.mark.parametrize( + "request_type", + [ + storage_batch_operations.DeleteJobRequest, + dict, + ], +) +def test_delete_job_rest_call_success(request_type): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_job(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_job_rest_interceptors(null_interceptor): + transport = transports.StorageBatchOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.StorageBatchOperationsRestInterceptor(), + ) + client = StorageBatchOperationsClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.StorageBatchOperationsRestInterceptor, "pre_delete_job" + ) as pre: + pre.assert_not_called() + pb_message = storage_batch_operations.DeleteJobRequest.pb( + storage_batch_operations.DeleteJobRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = storage_batch_operations.DeleteJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] +def test_cancel_job_rest_bad_request( + request_type=storage_batch_operations.CancelJobRequest, +): + client = StorageBatchOperationsClient( + 
credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + request = request_type(**request_init) - subfields_not_in_runtime = [] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_job(request) - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["job"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) +@pytest.mark.parametrize( + "request_type", + [ + storage_batch_operations.CancelJobRequest, + dict, + ], +) +def test_cancel_job_rest_call_success(request_type): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` 
because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["job"][field])): - del request_init["job"][field][i][subfield] - else: - del request_init["job"][field][subfield] + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = storage_batch_operations.CancelJobResponse() # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = storage_batch_operations.CancelJobResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_job(request) + response = client.cancel_job(request) # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, storage_batch_operations.CancelJobResponse) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_job_rest_interceptors(null_interceptor): +def test_cancel_job_rest_interceptors(null_interceptor): transport = transports.StorageBatchOperationsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5057,20 +6791,18 @@ def test_create_job_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.StorageBatchOperationsRestInterceptor, "post_create_job" + transports.StorageBatchOperationsRestInterceptor, "post_cancel_job" ) as post, mock.patch.object( transports.StorageBatchOperationsRestInterceptor, - "post_create_job_with_metadata", + "post_cancel_job_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.StorageBatchOperationsRestInterceptor, "pre_create_job" + transports.StorageBatchOperationsRestInterceptor, "pre_cancel_job" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = storage_batch_operations.CreateJobRequest.pb( - storage_batch_operations.CreateJobRequest() + pb_message = storage_batch_operations.CancelJobRequest.pb( + storage_batch_operations.CancelJobRequest() ) transcode.return_value = { "method": "post", @@ -5082,19 +6814,24 @@ def test_create_job_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = storage_batch_operations.CancelJobResponse.to_json( + storage_batch_operations.CancelJobResponse() + ) req.return_value.content = return_value - request = 
storage_batch_operations.CreateJobRequest() + request = storage_batch_operations.CancelJobRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = storage_batch_operations.CancelJobResponse() + post_with_metadata.return_value = ( + storage_batch_operations.CancelJobResponse(), + metadata, + ) - client.create_job( + client.cancel_job( request, metadata=[ ("key", "val"), @@ -5107,14 +6844,14 @@ def test_create_job_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_delete_job_rest_bad_request( - request_type=storage_batch_operations.DeleteJobRequest, +def test_list_bucket_operations_rest_bad_request( + request_type=storage_batch_operations.ListBucketOperationsRequest, ): client = StorageBatchOperationsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2/jobs/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -5129,45 +6866,55 @@ def test_delete_job_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_job(request) + client.list_bucket_operations(request) @pytest.mark.parametrize( "request_type", [ - storage_batch_operations.DeleteJobRequest, + storage_batch_operations.ListBucketOperationsRequest, dict, ], ) -def test_delete_job_rest_call_success(request_type): +def test_list_bucket_operations_rest_call_success(request_type): client = StorageBatchOperationsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2/jobs/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = storage_batch_operations.ListBucketOperationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = storage_batch_operations.ListBucketOperationsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_job(request) + response = client.list_bucket_operations(request) # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, pagers.ListBucketOperationsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_job_rest_interceptors(null_interceptor): +def test_list_bucket_operations_rest_interceptors(null_interceptor): transport = transports.StorageBatchOperationsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5181,11 +6928,18 @@ def test_delete_job_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.StorageBatchOperationsRestInterceptor, "pre_delete_job" + transports.StorageBatchOperationsRestInterceptor, "post_list_bucket_operations" + ) as post, mock.patch.object( + transports.StorageBatchOperationsRestInterceptor, + "post_list_bucket_operations_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.StorageBatchOperationsRestInterceptor, "pre_list_bucket_operations" ) as pre: pre.assert_not_called() - pb_message = storage_batch_operations.DeleteJobRequest.pb( - storage_batch_operations.DeleteJobRequest() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = storage_batch_operations.ListBucketOperationsRequest.pb( + storage_batch_operations.ListBucketOperationsRequest() ) transcode.return_value = { "method": "post", @@ -5197,15 +6951,24 @@ def test_delete_job_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = storage_batch_operations.ListBucketOperationsResponse.to_json( + storage_batch_operations.ListBucketOperationsResponse() + ) + req.return_value.content = return_value - request = storage_batch_operations.DeleteJobRequest() + request = 
storage_batch_operations.ListBucketOperationsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = storage_batch_operations.ListBucketOperationsResponse() + post_with_metadata.return_value = ( + storage_batch_operations.ListBucketOperationsResponse(), + metadata, + ) - client.delete_job( + client.list_bucket_operations( request, metadata=[ ("key", "val"), @@ -5214,16 +6977,20 @@ def test_delete_job_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() -def test_cancel_job_rest_bad_request( - request_type=storage_batch_operations.CancelJobRequest, +def test_get_bucket_operation_rest_bad_request( + request_type=storage_batch_operations.GetBucketOperationRequest, ): client = StorageBatchOperationsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/jobs/sample3/bucketOperations/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -5238,48 +7005,57 @@ def test_cancel_job_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_job(request) + client.get_bucket_operation(request) @pytest.mark.parametrize( "request_type", [ - storage_batch_operations.CancelJobRequest, + storage_batch_operations.GetBucketOperationRequest, dict, ], ) -def test_cancel_job_rest_call_success(request_type): +def test_get_bucket_operation_rest_call_success(request_type): client = StorageBatchOperationsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/jobs/sample3/bucketOperations/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = storage_batch_operations.CancelJobResponse() + return_value = storage_batch_operations_types.BucketOperation( + name="name_value", + bucket_name="bucket_name_value", + state=storage_batch_operations_types.BucketOperation.State.QUEUED, + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 # Convert return value to protobuf type - return_value = storage_batch_operations.CancelJobResponse.pb(return_value) + return_value = storage_batch_operations_types.BucketOperation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.cancel_job(request) + response = client.get_bucket_operation(request) # Establish that the response is the type that we expect. - assert isinstance(response, storage_batch_operations.CancelJobResponse) + assert isinstance(response, storage_batch_operations_types.BucketOperation) + assert response.name == "name_value" + assert response.bucket_name == "bucket_name_value" + assert response.state == storage_batch_operations_types.BucketOperation.State.QUEUED @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_cancel_job_rest_interceptors(null_interceptor): +def test_get_bucket_operation_rest_interceptors(null_interceptor): transport = transports.StorageBatchOperationsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5293,18 +7069,18 @@ def test_cancel_job_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.StorageBatchOperationsRestInterceptor, "post_cancel_job" + transports.StorageBatchOperationsRestInterceptor, "post_get_bucket_operation" ) as post, mock.patch.object( transports.StorageBatchOperationsRestInterceptor, - 
"post_cancel_job_with_metadata", + "post_get_bucket_operation_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.StorageBatchOperationsRestInterceptor, "pre_cancel_job" + transports.StorageBatchOperationsRestInterceptor, "pre_get_bucket_operation" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = storage_batch_operations.CancelJobRequest.pb( - storage_batch_operations.CancelJobRequest() + pb_message = storage_batch_operations.GetBucketOperationRequest.pb( + storage_batch_operations.GetBucketOperationRequest() ) transcode.return_value = { "method": "post", @@ -5316,24 +7092,24 @@ def test_cancel_job_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = storage_batch_operations.CancelJobResponse.to_json( - storage_batch_operations.CancelJobResponse() + return_value = storage_batch_operations_types.BucketOperation.to_json( + storage_batch_operations_types.BucketOperation() ) req.return_value.content = return_value - request = storage_batch_operations.CancelJobRequest() + request = storage_batch_operations.GetBucketOperationRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = storage_batch_operations.CancelJobResponse() + post.return_value = storage_batch_operations_types.BucketOperation() post_with_metadata.return_value = ( - storage_batch_operations.CancelJobResponse(), + storage_batch_operations_types.BucketOperation(), metadata, ) - client.cancel_job( + client.get_bucket_operation( request, metadata=[ ("key", "val"), @@ -5821,6 +7597,50 @@ def test_cancel_job_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_bucket_operations_empty_call_rest(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_bucket_operations), "__call__" + ) as call: + client.list_bucket_operations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = storage_batch_operations.ListBucketOperationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_bucket_operation_empty_call_rest(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_bucket_operation), "__call__" + ) as call: + client.get_bucket_operation(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = storage_batch_operations.GetBucketOperationRequest() + + assert args[0] == request_msg + + def test_storage_batch_operations_rest_lro_client(): client = StorageBatchOperationsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5876,6 +7696,8 @@ def test_storage_batch_operations_base_transport(): "create_job", "delete_job", "cancel_job", + "list_bucket_operations", + "get_bucket_operation", "get_location", "list_locations", "get_operation", @@ -6164,6 +7986,12 @@ def test_storage_batch_operations_client_transport_session_collision(transport_n session1 = client1.transport.cancel_job._session session2 = client2.transport.cancel_job._session assert session1 != session2 + session1 = client1.transport.list_bucket_operations._session + session2 = client2.transport.list_bucket_operations._session + assert session1 != session2 + session1 = client1.transport.get_bucket_operation._session + session2 = client2.transport.get_bucket_operation._session + assert session1 != session2 def test_storage_batch_operations_grpc_transport_channel(): @@ -6327,11 +8155,42 @@ def test_storage_batch_operations_grpc_lro_async_client(): assert transport.operations_client is transport.operations_client -def test_crypto_key_path(): +def test_bucket_operation_path(): project = "squid" location = "clam" - key_ring = "whelk" - crypto_key = "octopus" + job = "whelk" + bucket_operation = "octopus" + expected = "projects/{project}/locations/{location}/jobs/{job}/bucketOperations/{bucket_operation}".format( + project=project, + location=location, + job=job, + bucket_operation=bucket_operation, + ) + actual = StorageBatchOperationsClient.bucket_operation_path( + project, location, job, bucket_operation + ) + assert expected == actual + + +def test_parse_bucket_operation_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "job": "cuttlefish", + "bucket_operation": "mussel", + } + path = 
StorageBatchOperationsClient.bucket_operation_path(**expected) + + # Check that the path construction is reversible. + actual = StorageBatchOperationsClient.parse_bucket_operation_path(path) + assert expected == actual + + +def test_crypto_key_path(): + project = "winkle" + location = "nautilus" + key_ring = "scallop" + crypto_key = "abalone" expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( project=project, location=location, @@ -6346,10 +8205,10 @@ def test_crypto_key_path(): def test_parse_crypto_key_path(): expected = { - "project": "oyster", - "location": "nudibranch", - "key_ring": "cuttlefish", - "crypto_key": "mussel", + "project": "squid", + "location": "clam", + "key_ring": "whelk", + "crypto_key": "octopus", } path = StorageBatchOperationsClient.crypto_key_path(**expected) @@ -6359,9 +8218,9 @@ def test_parse_crypto_key_path(): def test_job_path(): - project = "winkle" - location = "nautilus" - job = "scallop" + project = "oyster" + location = "nudibranch" + job = "cuttlefish" expected = "projects/{project}/locations/{location}/jobs/{job}".format( project=project, location=location, @@ -6373,9 +8232,9 @@ def test_job_path(): def test_parse_job_path(): expected = { - "project": "abalone", - "location": "squid", - "job": "clam", + "project": "mussel", + "location": "winkle", + "job": "nautilus", } path = StorageBatchOperationsClient.job_path(**expected) @@ -6385,7 +8244,7 @@ def test_parse_job_path(): def test_common_billing_account_path(): - billing_account = "whelk" + billing_account = "scallop" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -6395,7 +8254,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "octopus", + "billing_account": "abalone", } path = StorageBatchOperationsClient.common_billing_account_path(**expected) @@ -6405,7 +8264,7 @@ def 
test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "oyster" + folder = "squid" expected = "folders/{folder}".format( folder=folder, ) @@ -6415,7 +8274,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nudibranch", + "folder": "clam", } path = StorageBatchOperationsClient.common_folder_path(**expected) @@ -6425,7 +8284,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "cuttlefish" + organization = "whelk" expected = "organizations/{organization}".format( organization=organization, ) @@ -6435,7 +8294,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "mussel", + "organization": "octopus", } path = StorageBatchOperationsClient.common_organization_path(**expected) @@ -6445,7 +8304,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "winkle" + project = "oyster" expected = "projects/{project}".format( project=project, ) @@ -6455,7 +8314,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nautilus", + "project": "nudibranch", } path = StorageBatchOperationsClient.common_project_path(**expected) @@ -6465,8 +8324,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "scallop" - location = "abalone" + project = "cuttlefish" + location = "mussel" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -6477,8 +8336,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "squid", - "location": "clam", + "project": "winkle", + "location": "nautilus", } path = StorageBatchOperationsClient.common_location_path(**expected) diff --git a/packages/google-maps-places/google/maps/places_v1/types/content_block.py 
b/packages/google-maps-places/google/maps/places_v1/types/content_block.py index f23573c3134a..1078a3b95300 100644 --- a/packages/google-maps-places/google/maps/places_v1/types/content_block.py +++ b/packages/google-maps-places/google/maps/places_v1/types/content_block.py @@ -20,8 +20,6 @@ import google.type.localized_text_pb2 as localized_text_pb2 # type: ignore import proto # type: ignore -from google.maps.places_v1.types import reference - __protobuf__ = proto.module( package="google.maps.places.v1", manifest={ @@ -34,33 +32,22 @@ class ContentBlock(proto.Message): r"""A block of content that can be served individually. Attributes: - topic (str): - The topic of the content, for example - "overview" or "restaurant". content (google.type.localized_text_pb2.LocalizedText): Content related to the topic. - references (google.maps.places_v1.types.References): - Experimental: See - https://developers.google.com/maps/documentation/places/web-service/experimental/places-generative - for more details. - - References that are related to this block of - content. + referenced_places (MutableSequence[str]): + The list of resource names of the referenced + places. This name can be used in other APIs that + accept Place resource names. 
""" - topic: str = proto.Field( - proto.STRING, - number=1, - ) content: localized_text_pb2.LocalizedText = proto.Field( proto.MESSAGE, number=2, message=localized_text_pb2.LocalizedText, ) - references: reference.References = proto.Field( - proto.MESSAGE, - number=3, - message=reference.References, + referenced_places: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, ) diff --git a/packages/google-maps-places/google/maps/places_v1/types/contextual_content.py b/packages/google-maps-places/google/maps/places_v1/types/contextual_content.py index fad28ccf57b6..4ea8eba90b60 100644 --- a/packages/google-maps-places/google/maps/places_v1/types/contextual_content.py +++ b/packages/google-maps-places/google/maps/places_v1/types/contextual_content.py @@ -39,11 +39,11 @@ class ContextualContent(proto.Message): Attributes: reviews (MutableSequence[google.maps.places_v1.types.Review]): - List of reviews about this place, contexual + List of reviews about this place, contextual to the place query. photos (MutableSequence[google.maps.places_v1.types.Photo]): Information (including references) about - photos of this place, contexual to the place + photos of this place, contextual to the place query. justifications (MutableSequence[google.maps.places_v1.types.ContextualContent.Justification]): Experimental: See diff --git a/packages/google-maps-places/google/maps/places_v1/types/place.py b/packages/google-maps-places/google/maps/places_v1/types/place.py index d153fc52a566..2c11c9aad891 100644 --- a/packages/google-maps-places/google/maps/places_v1/types/place.py +++ b/packages/google-maps-places/google/maps/places_v1/types/place.py @@ -106,6 +106,13 @@ class Place(proto.Message): https://developers.google.com/maps/documentation/places/web-service/place-types. The primary type may be missing if the place's primary type is not a supported type. 
+ google_maps_type_label (google.type.localized_text_pb2.LocalizedText): + The type label of the place on Google Maps, localized to the + request language if applicable, for example, "Restaurant", + "Cafe", "Airport", etc. The type label may be different from + the primary type display name and may not be a supported + type in `Places API Place Types + table `__. national_phone_number (str): A human-readable phone number for the place, in national format. @@ -384,6 +391,9 @@ class Place(proto.Message): areas. See address descriptor regional coverage in https://developers.google.com/maps/documentation/geocoding/address-descriptors/coverage. + google_maps_links (google.maps.places_v1.types.Place.GoogleMapsLinks): + Links to trigger different Google Maps + actions. price_range (google.maps.places_v1.types.PriceRange): The price range associated with a Place. review_summary (google.maps.places_v1.types.Place.ReviewSummary): @@ -1011,6 +1021,48 @@ class ContainingPlace(proto.Message): number=2, ) + class GoogleMapsLinks(proto.Message): + r"""Links to trigger different Google Maps actions. + + Attributes: + directions_uri (str): + A link to show the directions to the place. The link only + populates the destination location and uses the default + travel mode ``DRIVE``. + place_uri (str): + A link to show this place. + write_a_review_uri (str): + A link to write a review for this place on + Google Maps. + reviews_uri (str): + A link to show reviews of this place on + Google Maps. + photos_uri (str): + A link to show photos of this place on Google + Maps. 
+ """ + + directions_uri: str = proto.Field( + proto.STRING, + number=1, + ) + place_uri: str = proto.Field( + proto.STRING, + number=2, + ) + write_a_review_uri: str = proto.Field( + proto.STRING, + number=3, + ) + reviews_uri: str = proto.Field( + proto.STRING, + number=4, + ) + photos_uri: str = proto.Field( + proto.STRING, + number=5, + ) + class ReviewSummary(proto.Message): r"""AI-generated summary of the place using user reviews. @@ -1247,6 +1299,11 @@ class Link(proto.Message): number=32, message=localized_text_pb2.LocalizedText, ) + google_maps_type_label: localized_text_pb2.LocalizedText = proto.Field( + proto.MESSAGE, + number=96, + message=localized_text_pb2.LocalizedText, + ) national_phone_number: str = proto.Field( proto.STRING, number=7, @@ -1547,6 +1604,11 @@ class Link(proto.Message): number=84, message=gmp_address_descriptor.AddressDescriptor, ) + google_maps_links: GoogleMapsLinks = proto.Field( + proto.MESSAGE, + number=85, + message=GoogleMapsLinks, + ) price_range: gmp_price_range.PriceRange = proto.Field( proto.MESSAGE, number=86, diff --git a/packages/grafeas/grafeas/grafeas_v1/types/common.py b/packages/grafeas/grafeas/grafeas_v1/types/common.py index 45d9b9e23c13..a6fb9af8a819 100644 --- a/packages/grafeas/grafeas/grafeas_v1/types/common.py +++ b/packages/grafeas/grafeas/grafeas_v1/types/common.py @@ -268,6 +268,8 @@ class BaseImage(proto.Message): layer_count (int): The number of layers that the base image is composed of. + registry (str): + The registry in which the base image is from. 
""" name: str = proto.Field( @@ -282,6 +284,10 @@ class BaseImage(proto.Message): proto.INT32, number=3, ) + registry: str = proto.Field( + proto.STRING, + number=4, + ) class LayerDetails(proto.Message): diff --git a/packages/grafeas/grafeas/grafeas_v1/types/vulnerability.py b/packages/grafeas/grafeas/grafeas_v1/types/vulnerability.py index 30678bd58611..9c7df22a2177 100644 --- a/packages/grafeas/grafeas/grafeas_v1/types/vulnerability.py +++ b/packages/grafeas/grafeas/grafeas_v1/types/vulnerability.py @@ -68,6 +68,9 @@ class VulnerabilityNote(proto.Message): cvss_v2 (grafeas.grafeas_v1.types.CVSS): The full description of the v2 CVSS for this vulnerability. + advisory_publish_time (google.protobuf.timestamp_pb2.Timestamp): + The time this advisory was published by the + source. """ class Detail(proto.Message): @@ -301,6 +304,11 @@ class KnowledgeBase(proto.Message): number=8, message=cvss.CVSS, ) + advisory_publish_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) class VulnerabilityOccurrence(proto.Message): diff --git a/packages/grafeas/tests/unit/gapic/grafeas_v1/test_grafeas.py b/packages/grafeas/tests/unit/gapic/grafeas_v1/test_grafeas.py index 917a674b1fd4..249cc716dfe6 100644 --- a/packages/grafeas/tests/unit/gapic/grafeas_v1/test_grafeas.py +++ b/packages/grafeas/tests/unit/gapic/grafeas_v1/test_grafeas.py @@ -9598,6 +9598,7 @@ def test_create_occurrence_rest_call_success(request_type): "name": "name_value", "repository": "repository_value", "layer_count": 1189, + "registry": "registry_value", } ], }, @@ -10327,6 +10328,7 @@ def test_update_occurrence_rest_call_success(request_type): "name": "name_value", "repository": "repository_value", "layer_count": 1189, + "registry": "registry_value", } ], }, @@ -11436,6 +11438,7 @@ def test_create_note_rest_call_success(request_type): "integrity_impact": 1, "availability_impact": 1, }, + "advisory_publish_time": {}, }, "build": {"builder_version": 
"builder_version_value"}, "image": { @@ -11950,6 +11953,7 @@ def test_update_note_rest_call_success(request_type): "integrity_impact": 1, "availability_impact": 1, }, + "advisory_publish_time": {}, }, "build": {"builder_version": "builder_version_value"}, "image": {