diff --git a/packages/google-cloud-deploy/google/cloud/deploy/__init__.py b/packages/google-cloud-deploy/google/cloud/deploy/__init__.py index 8d76000284f9..a08ae32f8a1f 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy/__init__.py +++ b/packages/google-cloud-deploy/google/cloud/deploy/__init__.py @@ -61,6 +61,7 @@ CreateChildRolloutJobRun, CreateCustomTargetTypeRequest, CreateDeliveryPipelineRequest, + CreateDeployPolicyRequest, CreateReleaseRequest, CreateRolloutRequest, CreateTargetRequest, @@ -74,20 +75,25 @@ DeleteAutomationRequest, DeleteCustomTargetTypeRequest, DeleteDeliveryPipelineRequest, + DeleteDeployPolicyRequest, DeleteTargetRequest, DeliveryPipeline, + DeliveryPipelineAttribute, DeployArtifact, DeployJob, DeployJobRun, DeployJobRunMetadata, DeploymentJobs, DeployParameters, + DeployPolicy, + DeployPolicyResourceSelector, ExecutionConfig, GetAutomationRequest, GetAutomationRunRequest, GetConfigRequest, GetCustomTargetTypeRequest, GetDeliveryPipelineRequest, + GetDeployPolicyRequest, GetJobRunRequest, GetReleaseRequest, GetRolloutRequest, @@ -106,6 +112,8 @@ ListCustomTargetTypesResponse, ListDeliveryPipelinesRequest, ListDeliveryPipelinesResponse, + ListDeployPoliciesRequest, + ListDeployPoliciesResponse, ListJobRunsRequest, ListJobRunsResponse, ListReleasesRequest, @@ -120,6 +128,9 @@ Phase, PipelineCondition, PipelineReadyCondition, + PolicyRule, + PolicyViolation, + PolicyViolationDetails, Postdeploy, PostdeployJob, PostdeployJobRun, @@ -129,6 +140,7 @@ PrivatePool, PromoteReleaseOperation, PromoteReleaseRule, + Range, Release, RenderMetadata, RepairMode, @@ -136,6 +148,7 @@ RepairRolloutOperation, RepairRolloutRule, RepairState, + RestrictRollout, Retry, RetryAttempt, RetryJobRequest, @@ -162,16 +175,27 @@ TargetsTypeCondition, TerminateJobRunRequest, TerminateJobRunResponse, + TimeWindow, UpdateAutomationRequest, UpdateCustomTargetTypeRequest, UpdateDeliveryPipelineRequest, + UpdateDeployPolicyRequest, UpdateTargetRequest, VerifyJob, 
VerifyJobRun, ) +from google.cloud.deploy_v1.types.customtargettype_notification_payload import ( + CustomTargetTypeNotificationEvent, +) from google.cloud.deploy_v1.types.deliverypipeline_notification_payload import ( DeliveryPipelineNotificationEvent, ) +from google.cloud.deploy_v1.types.deploypolicy_evaluation_payload import ( + DeployPolicyEvaluationEvent, +) +from google.cloud.deploy_v1.types.deploypolicy_notification_payload import ( + DeployPolicyNotificationEvent, +) from google.cloud.deploy_v1.types.jobrun_notification_payload import ( JobRunNotificationEvent, ) @@ -228,6 +252,7 @@ "CreateChildRolloutJobRun", "CreateCustomTargetTypeRequest", "CreateDeliveryPipelineRequest", + "CreateDeployPolicyRequest", "CreateReleaseRequest", "CreateRolloutRequest", "CreateTargetRequest", @@ -241,20 +266,25 @@ "DeleteAutomationRequest", "DeleteCustomTargetTypeRequest", "DeleteDeliveryPipelineRequest", + "DeleteDeployPolicyRequest", "DeleteTargetRequest", "DeliveryPipeline", + "DeliveryPipelineAttribute", "DeployArtifact", "DeployJob", "DeployJobRun", "DeployJobRunMetadata", "DeploymentJobs", "DeployParameters", + "DeployPolicy", + "DeployPolicyResourceSelector", "ExecutionConfig", "GetAutomationRequest", "GetAutomationRunRequest", "GetConfigRequest", "GetCustomTargetTypeRequest", "GetDeliveryPipelineRequest", + "GetDeployPolicyRequest", "GetJobRunRequest", "GetReleaseRequest", "GetRolloutRequest", @@ -273,6 +303,8 @@ "ListCustomTargetTypesResponse", "ListDeliveryPipelinesRequest", "ListDeliveryPipelinesResponse", + "ListDeployPoliciesRequest", + "ListDeployPoliciesResponse", "ListJobRunsRequest", "ListJobRunsResponse", "ListReleasesRequest", @@ -287,6 +319,9 @@ "Phase", "PipelineCondition", "PipelineReadyCondition", + "PolicyRule", + "PolicyViolation", + "PolicyViolationDetails", "Postdeploy", "PostdeployJob", "PostdeployJobRun", @@ -296,12 +331,14 @@ "PrivatePool", "PromoteReleaseOperation", "PromoteReleaseRule", + "Range", "Release", "RenderMetadata", "RepairMode", 
"RepairPhase", "RepairRolloutOperation", "RepairRolloutRule", + "RestrictRollout", "Retry", "RetryAttempt", "RetryJobRequest", @@ -327,16 +364,21 @@ "TargetsTypeCondition", "TerminateJobRunRequest", "TerminateJobRunResponse", + "TimeWindow", "UpdateAutomationRequest", "UpdateCustomTargetTypeRequest", "UpdateDeliveryPipelineRequest", + "UpdateDeployPolicyRequest", "UpdateTargetRequest", "VerifyJob", "VerifyJobRun", "BackoffMode", "RepairState", "SkaffoldSupportState", + "CustomTargetTypeNotificationEvent", "DeliveryPipelineNotificationEvent", + "DeployPolicyEvaluationEvent", + "DeployPolicyNotificationEvent", "JobRunNotificationEvent", "Type", "ReleaseNotificationEvent", diff --git a/packages/google-cloud-deploy/google/cloud/deploy/gapic_version.py b/packages/google-cloud-deploy/google/cloud/deploy/gapic_version.py index 68899264edad..558c8aab67c5 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy/gapic_version.py +++ b/packages/google-cloud-deploy/google/cloud/deploy/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.19.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/__init__.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/__init__.py index fbba3a484180..0f09136048c7 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/__init__.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/__init__.py @@ -58,6 +58,7 @@ CreateChildRolloutJobRun, CreateCustomTargetTypeRequest, CreateDeliveryPipelineRequest, + CreateDeployPolicyRequest, CreateReleaseRequest, CreateRolloutRequest, CreateTargetRequest, @@ -71,20 +72,25 @@ DeleteAutomationRequest, DeleteCustomTargetTypeRequest, DeleteDeliveryPipelineRequest, + DeleteDeployPolicyRequest, DeleteTargetRequest, DeliveryPipeline, + DeliveryPipelineAttribute, DeployArtifact, DeployJob, DeployJobRun, DeployJobRunMetadata, DeploymentJobs, DeployParameters, + DeployPolicy, + DeployPolicyResourceSelector, ExecutionConfig, GetAutomationRequest, GetAutomationRunRequest, GetConfigRequest, GetCustomTargetTypeRequest, GetDeliveryPipelineRequest, + GetDeployPolicyRequest, GetJobRunRequest, GetReleaseRequest, GetRolloutRequest, @@ -103,6 +109,8 @@ ListCustomTargetTypesResponse, ListDeliveryPipelinesRequest, ListDeliveryPipelinesResponse, + ListDeployPoliciesRequest, + ListDeployPoliciesResponse, ListJobRunsRequest, ListJobRunsResponse, ListReleasesRequest, @@ -117,6 +125,9 @@ Phase, PipelineCondition, PipelineReadyCondition, + PolicyRule, + PolicyViolation, + PolicyViolationDetails, Postdeploy, PostdeployJob, PostdeployJobRun, @@ -126,6 +137,7 @@ PrivatePool, PromoteReleaseOperation, PromoteReleaseRule, + Range, Release, RenderMetadata, RepairMode, @@ -133,6 +145,7 @@ RepairRolloutOperation, RepairRolloutRule, RepairState, + RestrictRollout, Retry, RetryAttempt, RetryJobRequest, @@ -159,16 +172,23 @@ TargetsTypeCondition, TerminateJobRunRequest, TerminateJobRunResponse, + TimeWindow, UpdateAutomationRequest, 
UpdateCustomTargetTypeRequest, UpdateDeliveryPipelineRequest, + UpdateDeployPolicyRequest, UpdateTargetRequest, VerifyJob, VerifyJobRun, ) +from .types.customtargettype_notification_payload import ( + CustomTargetTypeNotificationEvent, +) from .types.deliverypipeline_notification_payload import ( DeliveryPipelineNotificationEvent, ) +from .types.deploypolicy_evaluation_payload import DeployPolicyEvaluationEvent +from .types.deploypolicy_notification_payload import DeployPolicyNotificationEvent from .types.jobrun_notification_payload import JobRunNotificationEvent from .types.log_enums import Type from .types.release_notification_payload import ReleaseNotificationEvent @@ -218,6 +238,7 @@ "CreateChildRolloutJobRun", "CreateCustomTargetTypeRequest", "CreateDeliveryPipelineRequest", + "CreateDeployPolicyRequest", "CreateReleaseRequest", "CreateRolloutRequest", "CreateTargetRequest", @@ -227,18 +248,25 @@ "CustomTargetDeployMetadata", "CustomTargetSkaffoldActions", "CustomTargetType", + "CustomTargetTypeNotificationEvent", "DefaultPool", "DeleteAutomationRequest", "DeleteCustomTargetTypeRequest", "DeleteDeliveryPipelineRequest", + "DeleteDeployPolicyRequest", "DeleteTargetRequest", "DeliveryPipeline", + "DeliveryPipelineAttribute", "DeliveryPipelineNotificationEvent", "DeployArtifact", "DeployJob", "DeployJobRun", "DeployJobRunMetadata", "DeployParameters", + "DeployPolicy", + "DeployPolicyEvaluationEvent", + "DeployPolicyNotificationEvent", + "DeployPolicyResourceSelector", "DeploymentJobs", "ExecutionConfig", "GetAutomationRequest", @@ -246,6 +274,7 @@ "GetConfigRequest", "GetCustomTargetTypeRequest", "GetDeliveryPipelineRequest", + "GetDeployPolicyRequest", "GetJobRunRequest", "GetReleaseRequest", "GetRolloutRequest", @@ -265,6 +294,8 @@ "ListCustomTargetTypesResponse", "ListDeliveryPipelinesRequest", "ListDeliveryPipelinesResponse", + "ListDeployPoliciesRequest", + "ListDeployPoliciesResponse", "ListJobRunsRequest", "ListJobRunsResponse", "ListReleasesRequest", @@ 
-279,6 +310,9 @@ "Phase", "PipelineCondition", "PipelineReadyCondition", + "PolicyRule", + "PolicyViolation", + "PolicyViolationDetails", "Postdeploy", "PostdeployJob", "PostdeployJobRun", @@ -288,6 +322,7 @@ "PrivatePool", "PromoteReleaseOperation", "PromoteReleaseRule", + "Range", "Release", "ReleaseNotificationEvent", "ReleaseRenderEvent", @@ -297,6 +332,7 @@ "RepairRolloutOperation", "RepairRolloutRule", "RepairState", + "RestrictRollout", "Retry", "RetryAttempt", "RetryJobRequest", @@ -326,10 +362,12 @@ "TargetsTypeCondition", "TerminateJobRunRequest", "TerminateJobRunResponse", + "TimeWindow", "Type", "UpdateAutomationRequest", "UpdateCustomTargetTypeRequest", "UpdateDeliveryPipelineRequest", + "UpdateDeployPolicyRequest", "UpdateTargetRequest", "VerifyJob", "VerifyJobRun", diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_metadata.json b/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_metadata.json index a8eee6244a5f..32200eac6f82 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_metadata.json +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_metadata.json @@ -50,6 +50,11 @@ "create_delivery_pipeline" ] }, + "CreateDeployPolicy": { + "methods": [ + "create_deploy_policy" + ] + }, "CreateRelease": { "methods": [ "create_release" @@ -80,6 +85,11 @@ "delete_delivery_pipeline" ] }, + "DeleteDeployPolicy": { + "methods": [ + "delete_deploy_policy" + ] + }, "DeleteTarget": { "methods": [ "delete_target" @@ -110,6 +120,11 @@ "get_delivery_pipeline" ] }, + "GetDeployPolicy": { + "methods": [ + "get_deploy_policy" + ] + }, "GetJobRun": { "methods": [ "get_job_run" @@ -155,6 +170,11 @@ "list_delivery_pipelines" ] }, + "ListDeployPolicies": { + "methods": [ + "list_deploy_policies" + ] + }, "ListJobRuns": { "methods": [ "list_job_runs" @@ -205,6 +225,11 @@ "update_delivery_pipeline" ] }, + "UpdateDeployPolicy": { + "methods": [ + "update_deploy_policy" + ] + }, "UpdateTarget": { "methods": [ 
"update_target" @@ -255,6 +280,11 @@ "create_delivery_pipeline" ] }, + "CreateDeployPolicy": { + "methods": [ + "create_deploy_policy" + ] + }, "CreateRelease": { "methods": [ "create_release" @@ -285,6 +315,11 @@ "delete_delivery_pipeline" ] }, + "DeleteDeployPolicy": { + "methods": [ + "delete_deploy_policy" + ] + }, "DeleteTarget": { "methods": [ "delete_target" @@ -315,6 +350,11 @@ "get_delivery_pipeline" ] }, + "GetDeployPolicy": { + "methods": [ + "get_deploy_policy" + ] + }, "GetJobRun": { "methods": [ "get_job_run" @@ -360,6 +400,11 @@ "list_delivery_pipelines" ] }, + "ListDeployPolicies": { + "methods": [ + "list_deploy_policies" + ] + }, "ListJobRuns": { "methods": [ "list_job_runs" @@ -410,6 +455,11 @@ "update_delivery_pipeline" ] }, + "UpdateDeployPolicy": { + "methods": [ + "update_deploy_policy" + ] + }, "UpdateTarget": { "methods": [ "update_target" @@ -460,6 +510,11 @@ "create_delivery_pipeline" ] }, + "CreateDeployPolicy": { + "methods": [ + "create_deploy_policy" + ] + }, "CreateRelease": { "methods": [ "create_release" @@ -490,6 +545,11 @@ "delete_delivery_pipeline" ] }, + "DeleteDeployPolicy": { + "methods": [ + "delete_deploy_policy" + ] + }, "DeleteTarget": { "methods": [ "delete_target" @@ -520,6 +580,11 @@ "get_delivery_pipeline" ] }, + "GetDeployPolicy": { + "methods": [ + "get_deploy_policy" + ] + }, "GetJobRun": { "methods": [ "get_job_run" @@ -565,6 +630,11 @@ "list_delivery_pipelines" ] }, + "ListDeployPolicies": { + "methods": [ + "list_deploy_policies" + ] + }, "ListJobRuns": { "methods": [ "list_job_runs" @@ -615,6 +685,11 @@ "update_delivery_pipeline" ] }, + "UpdateDeployPolicy": { + "methods": [ + "update_deploy_policy" + ] + }, "UpdateTarget": { "methods": [ "update_target" diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_version.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_version.py index 68899264edad..558c8aab67c5 100644 --- 
a/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_version.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.19.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/async_client.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/async_client.py index 1139d2bb30d7..04cd600c64fb 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/async_client.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/async_client.py @@ -96,6 +96,8 @@ class CloudDeployAsyncClient: parse_delivery_pipeline_path = staticmethod( CloudDeployClient.parse_delivery_pipeline_path ) + deploy_policy_path = staticmethod(CloudDeployClient.deploy_policy_path) + parse_deploy_policy_path = staticmethod(CloudDeployClient.parse_deploy_policy_path) job_path = staticmethod(CloudDeployClient.job_path) parse_job_path = staticmethod(CloudDeployClient.parse_job_path) job_run_path = staticmethod(CloudDeployClient.job_run_path) @@ -583,7 +585,7 @@ async def sample_create_delivery_pipeline(): The request object. The request object for ``CreateDeliveryPipeline``. parent (:class:`str`): Required. The parent collection in which the - ``DeliveryPipeline`` should be created. Format should be + ``DeliveryPipeline`` must be created. The format is ``projects/{project_id}/locations/{location_name}``. This corresponds to the ``parent`` field @@ -728,8 +730,8 @@ async def sample_update_delivery_pipeline(): should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Required. Field mask is used to specify the fields to be - overwritten in the ``DeliveryPipeline`` resource by the - update. 
The fields specified in the update_mask are + overwritten by the update in the ``DeliveryPipeline`` + resource. The fields specified in the update_mask are relative to the resource, not the full request. A field will be overwritten if it's in the mask. If the user doesn't provide a mask then all fields are overwritten. @@ -861,7 +863,7 @@ async def sample_delete_delivery_pipeline(): The request object. The request object for ``DeleteDeliveryPipeline``. name (:class:`str`): Required. The name of the ``DeliveryPipeline`` to - delete. Format should be + delete. The format is ``projects/{project_id}/locations/{location_name}/deliveryPipelines/{pipeline_name}``. This corresponds to the ``name`` field @@ -1107,7 +1109,7 @@ async def sample_rollback_target(): The request object. The request object for ``RollbackTarget``. name (:class:`str`): Required. The ``DeliveryPipeline`` for which the - rollback ``Rollout`` should be created. Format should be + rollback ``Rollout`` must be created. The format is ``projects/{project_id}/locations/{location_name}/deliveryPipelines/{pipeline_name}``. This corresponds to the ``name`` field @@ -1341,7 +1343,7 @@ async def sample_create_target(): The request object. The request object for ``CreateTarget``. parent (:class:`str`): Required. The parent collection in which the ``Target`` - should be created. Format should be + must be created. The format is ``projects/{project_id}/locations/{location_name}``. This corresponds to the ``parent`` field @@ -1484,9 +1486,9 @@ async def sample_update_target(): should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Required. Field mask is used to specify the fields to be - overwritten in the Target resource by the update. The - fields specified in the update_mask are relative to the - resource, not the full request. A field will be + overwritten by the update in the ``Target`` resource. 
+ The fields specified in the update_mask are relative to + the resource, not the full request. A field will be overwritten if it's in the mask. If the user doesn't provide a mask then all fields are overwritten. @@ -1614,8 +1616,8 @@ async def sample_delete_target(): request (Optional[Union[google.cloud.deploy_v1.types.DeleteTargetRequest, dict]]): The request object. The request object for ``DeleteTarget``. name (:class:`str`): - Required. The name of the ``Target`` to delete. Format - should be + Required. The name of the ``Target`` to delete. The + format is ``projects/{project_id}/locations/{location_name}/targets/{target_name}``. This corresponds to the ``name`` field @@ -1984,7 +1986,7 @@ async def sample_create_custom_target_type(): The request object. The request object for ``CreateCustomTargetType``. parent (:class:`str`): Required. The parent collection in which the - ``CustomTargetType`` should be created. Format should be + ``CustomTargetType`` must be created. The format is ``projects/{project_id}/locations/{location_name}``. This corresponds to the ``parent`` field @@ -2135,8 +2137,8 @@ async def sample_update_custom_target_type(): should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Required. Field mask is used to specify the fields to be - overwritten in the ``CustomTargetType`` resource by the - update. The fields specified in the update_mask are + overwritten by the update in the ``CustomTargetType`` + resource. The fields specified in the update_mask are relative to the resource, not the full request. A field will be overwritten if it's in the mask. If the user doesn't provide a mask then all fields are overwritten. @@ -2626,7 +2628,7 @@ async def sample_create_release(): The request object. The request object for ``CreateRelease``, parent (:class:`str`): Required. The parent collection in which the ``Release`` - should be created. Format should be + is created. 
The format is ``projects/{project_id}/locations/{location_name}/deliveryPipelines/{pipeline_name}``. This corresponds to the ``parent`` field @@ -2821,6 +2823,645 @@ async def sample_abandon_release(): # Done; return the response. return response + async def create_deploy_policy( + self, + request: Optional[Union[cloud_deploy.CreateDeployPolicyRequest, dict]] = None, + *, + parent: Optional[str] = None, + deploy_policy: Optional[cloud_deploy.DeployPolicy] = None, + deploy_policy_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new DeployPolicy in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + async def sample_create_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + deploy_policy = deploy_v1.DeployPolicy() + deploy_policy.rules.restrict_rollouts.time_window.time_zone = "time_zone_value" + + request = deploy_v1.CreateDeployPolicyRequest( + parent="parent_value", + deploy_policy_id="deploy_policy_id_value", + deploy_policy=deploy_policy, + ) + + # Make the request + operation = client.create_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.deploy_v1.types.CreateDeployPolicyRequest, dict]]): + The request object. The request object for ``CreateDeployPolicy``. + parent (:class:`str`): + Required. The parent collection in which the + ``DeployPolicy`` must be created. The format is + ``projects/{project_id}/locations/{location_name}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deploy_policy (:class:`google.cloud.deploy_v1.types.DeployPolicy`): + Required. The ``DeployPolicy`` to create. + This corresponds to the ``deploy_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deploy_policy_id (:class:`str`): + Required. ID of the ``DeployPolicy``. + This corresponds to the ``deploy_policy_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.deploy_v1.types.DeployPolicy` A + DeployPolicy resource in the Cloud Deploy API. + + A DeployPolicy inhibits manual or automation driven + actions within a Delivery Pipeline or Target. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, deploy_policy, deploy_policy_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.CreateDeployPolicyRequest): + request = cloud_deploy.CreateDeployPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if deploy_policy is not None: + request.deploy_policy = deploy_policy + if deploy_policy_id is not None: + request.deploy_policy_id = deploy_policy_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_deploy_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_deploy.DeployPolicy, + metadata_type=cloud_deploy.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_deploy_policy( + self, + request: Optional[Union[cloud_deploy.UpdateDeployPolicyRequest, dict]] = None, + *, + deploy_policy: Optional[cloud_deploy.DeployPolicy] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single DeployPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + async def sample_update_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + deploy_policy = deploy_v1.DeployPolicy() + deploy_policy.rules.restrict_rollouts.time_window.time_zone = "time_zone_value" + + request = deploy_v1.UpdateDeployPolicyRequest( + deploy_policy=deploy_policy, + ) + + # Make the request + operation = client.update_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.deploy_v1.types.UpdateDeployPolicyRequest, dict]]): + The request object. The request object for ``UpdateDeployPolicy``. + deploy_policy (:class:`google.cloud.deploy_v1.types.DeployPolicy`): + Required. The ``DeployPolicy`` to update. + This corresponds to the ``deploy_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to specify the fields to be + overwritten by the update in the ``DeployPolicy`` + resource. The fields specified in the update_mask are + relative to the resource, not the full request. A field + will be overwritten if it's in the mask. If the user + doesn't provide a mask then all fields are overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.deploy_v1.types.DeployPolicy` A + DeployPolicy resource in the Cloud Deploy API. + + A DeployPolicy inhibits manual or automation driven + actions within a Delivery Pipeline or Target. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([deploy_policy, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.UpdateDeployPolicyRequest): + request = cloud_deploy.UpdateDeployPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if deploy_policy is not None: + request.deploy_policy = deploy_policy + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_deploy_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("deploy_policy.name", request.deploy_policy.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_deploy.DeployPolicy, + metadata_type=cloud_deploy.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_deploy_policy( + self, + request: Optional[Union[cloud_deploy.DeleteDeployPolicyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single DeployPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + async def sample_delete_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + request = deploy_v1.DeleteDeployPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.deploy_v1.types.DeleteDeployPolicyRequest, dict]]): + The request object. The request object for ``DeleteDeployPolicy``. + name (:class:`str`): + Required. The name of the ``DeployPolicy`` to delete. 
+ The format is + ``projects/{project_id}/locations/{location_name}/deployPolicies/{deploy_policy_name}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.DeleteDeployPolicyRequest): + request = cloud_deploy.DeleteDeployPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_deploy_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=cloud_deploy.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_deploy_policies( + self, + request: Optional[Union[cloud_deploy.ListDeployPoliciesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDeployPoliciesAsyncPager: + r"""Lists DeployPolicies in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + async def sample_list_deploy_policies(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + request = deploy_v1.ListDeployPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deploy_policies(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.deploy_v1.types.ListDeployPoliciesRequest, dict]]): + The request object. The request object for ``ListDeployPolicies``. + parent (:class:`str`): + Required. The parent, which owns this collection of + deploy policies. Format must be + ``projects/{project_id}/locations/{location_name}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.deploy_v1.services.cloud_deploy.pagers.ListDeployPoliciesAsyncPager: + The response object from ListDeployPolicies. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.ListDeployPoliciesRequest): + request = cloud_deploy.ListDeployPoliciesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_deploy_policies + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDeployPoliciesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_deploy_policy( + self, + request: Optional[Union[cloud_deploy.GetDeployPolicyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_deploy.DeployPolicy: + r"""Gets details of a single DeployPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + async def sample_get_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + request = deploy_v1.GetDeployPolicyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_deploy_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.deploy_v1.types.GetDeployPolicyRequest, dict]]): + The request object. The request object for ``GetDeployPolicy`` + name (:class:`str`): + Required. Name of the ``DeployPolicy``. Format must be + ``projects/{project_id}/locations/{location_name}/deployPolicies/{deploy_policy_name}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.deploy_v1.types.DeployPolicy: + A DeployPolicy resource in the Cloud Deploy API. + + A DeployPolicy inhibits manual or automation driven + actions within a Delivery Pipeline or Target. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.GetDeployPolicyRequest): + request = cloud_deploy.GetDeployPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_deploy_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def approve_rollout( self, request: Optional[Union[cloud_deploy.ApproveRolloutRequest, dict]] = None, @@ -3422,7 +4063,7 @@ async def sample_create_rollout(): ``CreateRollout``. parent (:class:`str`): Required. The parent collection in which the ``Rollout`` - should be created. Format should be + must be created. The format is ``projects/{project_id}/locations/{location_name}/deliveryPipelines/{pipeline_name}/releases/{release_name}``. This corresponds to the ``parent`` field @@ -4253,7 +4894,7 @@ async def sample_create_automation(): The request object. The request object for ``CreateAutomation``. parent (:class:`str`): Required. The parent collection in which the - ``Automation`` should be created. Format should be + ``Automation`` must be created. The format is ``projects/{project_id}/locations/{location_name}/deliveryPipelines/{pipeline_name}``. 
This corresponds to the ``parent`` field @@ -4406,8 +5047,8 @@ async def sample_update_automation(): should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Required. Field mask is used to specify the fields to be - overwritten in the ``Automation`` resource by the - update. The fields specified in the update_mask are + overwritten by the update in the ``Automation`` + resource. The fields specified in the update_mask are relative to the resource, not the full request. A field will be overwritten if it's in the mask. If the user doesn't provide a mask then all fields are overwritten. @@ -4540,8 +5181,8 @@ async def sample_delete_automation(): request (Optional[Union[google.cloud.deploy_v1.types.DeleteAutomationRequest, dict]]): The request object. The request object for ``DeleteAutomation``. name (:class:`str`): - Required. The name of the ``Automation`` to delete. - Format should be + Required. The name of the ``Automation`` to delete. The + format is ``projects/{project_id}/locations/{location_name}/deliveryPipelines/{pipeline_name}/automations/{automation_name}``. 
This corresponds to the ``name`` field diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py index 0e1370982e76..7fe18d124a9c 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py @@ -349,6 +349,28 @@ def parse_delivery_pipeline_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def deploy_policy_path( + project: str, + location: str, + deploy_policy: str, + ) -> str: + """Returns a fully-qualified deploy_policy string.""" + return "projects/{project}/locations/{location}/deployPolicies/{deploy_policy}".format( + project=project, + location=location, + deploy_policy=deploy_policy, + ) + + @staticmethod + def parse_deploy_policy_path(path: str) -> Dict[str, str]: + """Parses a deploy_policy path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/deployPolicies/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def job_path( project: str, @@ -1309,7 +1331,7 @@ def sample_create_delivery_pipeline(): The request object. The request object for ``CreateDeliveryPipeline``. parent (str): Required. The parent collection in which the - ``DeliveryPipeline`` should be created. Format should be + ``DeliveryPipeline`` must be created. The format is ``projects/{project_id}/locations/{location_name}``. This corresponds to the ``parent`` field @@ -1451,8 +1473,8 @@ def sample_update_delivery_pipeline(): should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Field mask is used to specify the fields to be - overwritten in the ``DeliveryPipeline`` resource by the - update. The fields specified in the update_mask are + overwritten by the update in the ``DeliveryPipeline`` + resource. 
The fields specified in the update_mask are relative to the resource, not the full request. A field will be overwritten if it's in the mask. If the user doesn't provide a mask then all fields are overwritten. @@ -1581,7 +1603,7 @@ def sample_delete_delivery_pipeline(): The request object. The request object for ``DeleteDeliveryPipeline``. name (str): Required. The name of the ``DeliveryPipeline`` to - delete. Format should be + delete. The format is ``projects/{project_id}/locations/{location_name}/deliveryPipelines/{pipeline_name}``. This corresponds to the ``name`` field @@ -1821,7 +1843,7 @@ def sample_rollback_target(): The request object. The request object for ``RollbackTarget``. name (str): Required. The ``DeliveryPipeline`` for which the - rollback ``Rollout`` should be created. Format should be + rollback ``Rollout`` must be created. The format is ``projects/{project_id}/locations/{location_name}/deliveryPipelines/{pipeline_name}``. This corresponds to the ``name`` field @@ -2049,7 +2071,7 @@ def sample_create_target(): The request object. The request object for ``CreateTarget``. parent (str): Required. The parent collection in which the ``Target`` - should be created. Format should be + must be created. The format is ``projects/{project_id}/locations/{location_name}``. This corresponds to the ``parent`` field @@ -2189,9 +2211,9 @@ def sample_update_target(): should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Field mask is used to specify the fields to be - overwritten in the Target resource by the update. The - fields specified in the update_mask are relative to the - resource, not the full request. A field will be + overwritten by the update in the ``Target`` resource. + The fields specified in the update_mask are relative to + the resource, not the full request. A field will be overwritten if it's in the mask. If the user doesn't provide a mask then all fields are overwritten. 
@@ -2316,8 +2338,8 @@ def sample_delete_target(): request (Union[google.cloud.deploy_v1.types.DeleteTargetRequest, dict]): The request object. The request object for ``DeleteTarget``. name (str): - Required. The name of the ``Target`` to delete. Format - should be + Required. The name of the ``Target`` to delete. The + format is ``projects/{project_id}/locations/{location_name}/targets/{target_name}``. This corresponds to the ``name`` field @@ -2677,7 +2699,7 @@ def sample_create_custom_target_type(): The request object. The request object for ``CreateCustomTargetType``. parent (str): Required. The parent collection in which the - ``CustomTargetType`` should be created. Format should be + ``CustomTargetType`` must be created. The format is ``projects/{project_id}/locations/{location_name}``. This corresponds to the ``parent`` field @@ -2827,8 +2849,8 @@ def sample_update_custom_target_type(): should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Field mask is used to specify the fields to be - overwritten in the ``CustomTargetType`` resource by the - update. The fields specified in the update_mask are + overwritten by the update in the ``CustomTargetType`` + resource. The fields specified in the update_mask are relative to the resource, not the full request. A field will be overwritten if it's in the mask. If the user doesn't provide a mask then all fields are overwritten. @@ -3310,7 +3332,7 @@ def sample_create_release(): The request object. The request object for ``CreateRelease``, parent (str): Required. The parent collection in which the ``Release`` - should be created. Format should be + is created. The format is ``projects/{project_id}/locations/{location_name}/deliveryPipelines/{pipeline_name}``. This corresponds to the ``parent`` field @@ -3499,6 +3521,630 @@ def sample_abandon_release(): # Done; return the response. 
return response + def create_deploy_policy( + self, + request: Optional[Union[cloud_deploy.CreateDeployPolicyRequest, dict]] = None, + *, + parent: Optional[str] = None, + deploy_policy: Optional[cloud_deploy.DeployPolicy] = None, + deploy_policy_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new DeployPolicy in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + def sample_create_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + deploy_policy = deploy_v1.DeployPolicy() + deploy_policy.rules.restrict_rollouts.time_window.time_zone = "time_zone_value" + + request = deploy_v1.CreateDeployPolicyRequest( + parent="parent_value", + deploy_policy_id="deploy_policy_id_value", + deploy_policy=deploy_policy, + ) + + # Make the request + operation = client.create_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.deploy_v1.types.CreateDeployPolicyRequest, dict]): + The request object. The request object for ``CreateDeployPolicy``. + parent (str): + Required. The parent collection in which the + ``DeployPolicy`` must be created. The format is + ``projects/{project_id}/locations/{location_name}``. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deploy_policy (google.cloud.deploy_v1.types.DeployPolicy): + Required. The ``DeployPolicy`` to create. + This corresponds to the ``deploy_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deploy_policy_id (str): + Required. ID of the ``DeployPolicy``. + This corresponds to the ``deploy_policy_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.deploy_v1.types.DeployPolicy` A + DeployPolicy resource in the Cloud Deploy API. + + A DeployPolicy inhibits manual or automation driven + actions within a Delivery Pipeline or Target. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, deploy_policy, deploy_policy_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.CreateDeployPolicyRequest): + request = cloud_deploy.CreateDeployPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if deploy_policy is not None: + request.deploy_policy = deploy_policy + if deploy_policy_id is not None: + request.deploy_policy_id = deploy_policy_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_deploy_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_deploy.DeployPolicy, + metadata_type=cloud_deploy.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_deploy_policy( + self, + request: Optional[Union[cloud_deploy.UpdateDeployPolicyRequest, dict]] = None, + *, + deploy_policy: Optional[cloud_deploy.DeployPolicy] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single DeployPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + def sample_update_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + deploy_policy = deploy_v1.DeployPolicy() + deploy_policy.rules.restrict_rollouts.time_window.time_zone = "time_zone_value" + + request = deploy_v1.UpdateDeployPolicyRequest( + deploy_policy=deploy_policy, + ) + + # Make the request + operation = client.update_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.deploy_v1.types.UpdateDeployPolicyRequest, dict]): + The request object. The request object for ``UpdateDeployPolicy``. + deploy_policy (google.cloud.deploy_v1.types.DeployPolicy): + Required. The ``DeployPolicy`` to update. + This corresponds to the ``deploy_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten by the update in the ``DeployPolicy`` + resource. The fields specified in the update_mask are + relative to the resource, not the full request. A field + will be overwritten if it's in the mask. If the user + doesn't provide a mask then all fields are overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.deploy_v1.types.DeployPolicy` A + DeployPolicy resource in the Cloud Deploy API. + + A DeployPolicy inhibits manual or automation driven + actions within a Delivery Pipeline or Target. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([deploy_policy, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.UpdateDeployPolicyRequest): + request = cloud_deploy.UpdateDeployPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if deploy_policy is not None: + request.deploy_policy = deploy_policy + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_deploy_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("deploy_policy.name", request.deploy_policy.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_deploy.DeployPolicy, + metadata_type=cloud_deploy.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_deploy_policy( + self, + request: Optional[Union[cloud_deploy.DeleteDeployPolicyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single DeployPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + def sample_delete_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + request = deploy_v1.DeleteDeployPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.deploy_v1.types.DeleteDeployPolicyRequest, dict]): + The request object. The request object for ``DeleteDeployPolicy``. + name (str): + Required. The name of the ``DeployPolicy`` to delete. + The format is + ``projects/{project_id}/locations/{location_name}/deployPolicies/{deploy_policy_name}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.DeleteDeployPolicyRequest): + request = cloud_deploy.DeleteDeployPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_deploy_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=cloud_deploy.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_deploy_policies( + self, + request: Optional[Union[cloud_deploy.ListDeployPoliciesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDeployPoliciesPager: + r"""Lists DeployPolicies in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + def sample_list_deploy_policies(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + request = deploy_v1.ListDeployPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deploy_policies(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.deploy_v1.types.ListDeployPoliciesRequest, dict]): + The request object. The request object for ``ListDeployPolicies``. + parent (str): + Required. The parent, which owns this collection of + deploy policies. Format must be + ``projects/{project_id}/locations/{location_name}``. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.deploy_v1.services.cloud_deploy.pagers.ListDeployPoliciesPager: + The response object from ListDeployPolicies. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.ListDeployPoliciesRequest): + request = cloud_deploy.ListDeployPoliciesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_deploy_policies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDeployPoliciesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_deploy_policy( + self, + request: Optional[Union[cloud_deploy.GetDeployPolicyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_deploy.DeployPolicy: + r"""Gets details of a single DeployPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import deploy_v1 + + def sample_get_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + request = deploy_v1.GetDeployPolicyRequest( + name="name_value", + ) + + # Make the request + response = client.get_deploy_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.deploy_v1.types.GetDeployPolicyRequest, dict]): + The request object. The request object for ``GetDeployPolicy`` + name (str): + Required. Name of the ``DeployPolicy``. Format must be + ``projects/{project_id}/locations/{location_name}/deployPolicies/{deploy_policy_name}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.deploy_v1.types.DeployPolicy: + A DeployPolicy resource in the Cloud Deploy API. + + A DeployPolicy inhibits manual or automation driven + actions within a Delivery Pipeline or Target. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_deploy.GetDeployPolicyRequest): + request = cloud_deploy.GetDeployPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_deploy_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def approve_rollout( self, request: Optional[Union[cloud_deploy.ApproveRolloutRequest, dict]] = None, @@ -4085,7 +4731,7 @@ def sample_create_rollout(): ``CreateRollout``. parent (str): Required. The parent collection in which the ``Rollout`` - should be created. Format should be + must be created. The format is ``projects/{project_id}/locations/{location_name}/deliveryPipelines/{pipeline_name}/releases/{release_name}``. This corresponds to the ``parent`` field @@ -4895,7 +5541,7 @@ def sample_create_automation(): The request object. The request object for ``CreateAutomation``. parent (str): Required. The parent collection in which the - ``Automation`` should be created. Format should be + ``Automation`` must be created. The format is ``projects/{project_id}/locations/{location_name}/deliveryPipelines/{pipeline_name}``. This corresponds to the ``parent`` field @@ -5045,8 +5691,8 @@ def sample_update_automation(): should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Field mask is used to specify the fields to be - overwritten in the ``Automation`` resource by the - update. The fields specified in the update_mask are + overwritten by the update in the ``Automation`` + resource. The fields specified in the update_mask are relative to the resource, not the full request. A field will be overwritten if it's in the mask. If the user doesn't provide a mask then all fields are overwritten. @@ -5176,8 +5822,8 @@ def sample_delete_automation(): request (Union[google.cloud.deploy_v1.types.DeleteAutomationRequest, dict]): The request object. The request object for ``DeleteAutomation``. name (str): - Required. The name of the ``Automation`` to delete. - Format should be + Required. The name of the ``Automation`` to delete. The + format is ``projects/{project_id}/locations/{location_name}/deliveryPipelines/{pipeline_name}/automations/{automation_name}``. 
This corresponds to the ``name`` field diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/pagers.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/pagers.py index bb4c97313cc8..4f206f5f23bd 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/pagers.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/pagers.py @@ -539,6 +539,134 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) +class ListDeployPoliciesPager: + """A pager for iterating through ``list_deploy_policies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.deploy_v1.types.ListDeployPoliciesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``deploy_policies`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDeployPolicies`` requests and continue to iterate + through the ``deploy_policies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.deploy_v1.types.ListDeployPoliciesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., cloud_deploy.ListDeployPoliciesResponse], + request: cloud_deploy.ListDeployPoliciesRequest, + response: cloud_deploy.ListDeployPoliciesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.deploy_v1.types.ListDeployPoliciesRequest): + The initial request object. + response (google.cloud.deploy_v1.types.ListDeployPoliciesResponse): + The initial response object. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloud_deploy.ListDeployPoliciesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[cloud_deploy.ListDeployPoliciesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[cloud_deploy.DeployPolicy]: + for page in self.pages: + yield from page.deploy_policies + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDeployPoliciesAsyncPager: + """A pager for iterating through ``list_deploy_policies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.deploy_v1.types.ListDeployPoliciesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``deploy_policies`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDeployPolicies`` requests and continue to iterate + through the ``deploy_policies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.deploy_v1.types.ListDeployPoliciesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[cloud_deploy.ListDeployPoliciesResponse]], + request: cloud_deploy.ListDeployPoliciesRequest, + response: cloud_deploy.ListDeployPoliciesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.deploy_v1.types.ListDeployPoliciesRequest): + The initial request object. + response (google.cloud.deploy_v1.types.ListDeployPoliciesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloud_deploy.ListDeployPoliciesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[cloud_deploy.ListDeployPoliciesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[cloud_deploy.DeployPolicy]: + async def async_generator(): + async for page in self.pages: + for response in page.deploy_policies: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + class ListRolloutsPager: """A pager for iterating through ``list_rollouts`` requests. 
diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/base.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/base.py index d3042176dd81..d52eff30b350 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/base.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/base.py @@ -304,6 +304,49 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.create_deploy_policy: gapic_v1.method.wrap_method( + self.create_deploy_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.update_deploy_policy: gapic_v1.method.wrap_method( + self.update_deploy_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_deploy_policy: gapic_v1.method.wrap_method( + self.delete_deploy_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.list_deploy_policies: gapic_v1.method.wrap_method( + self.list_deploy_policies, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_deploy_policy: gapic_v1.method.wrap_method( + self.get_deploy_policy, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), self.approve_rollout: gapic_v1.method.wrap_method( self.approve_rollout, default_timeout=60.0, @@ -699,6 +742,54 @@ def abandon_release( ]: raise NotImplementedError() + @property + def create_deploy_policy( + self, + ) -> Callable[ + [cloud_deploy.CreateDeployPolicyRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise 
NotImplementedError() + + @property + def update_deploy_policy( + self, + ) -> Callable[ + [cloud_deploy.UpdateDeployPolicyRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_deploy_policy( + self, + ) -> Callable[ + [cloud_deploy.DeleteDeployPolicyRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_deploy_policies( + self, + ) -> Callable[ + [cloud_deploy.ListDeployPoliciesRequest], + Union[ + cloud_deploy.ListDeployPoliciesResponse, + Awaitable[cloud_deploy.ListDeployPoliciesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_deploy_policy( + self, + ) -> Callable[ + [cloud_deploy.GetDeployPolicyRequest], + Union[cloud_deploy.DeployPolicy, Awaitable[cloud_deploy.DeployPolicy]], + ]: + raise NotImplementedError() + @property def approve_rollout( self, diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/grpc.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/grpc.py index 4ce9240634f7..4590ce080f41 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/grpc.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/grpc.py @@ -808,6 +808,140 @@ def abandon_release( ) return self._stubs["abandon_release"] + @property + def create_deploy_policy( + self, + ) -> Callable[[cloud_deploy.CreateDeployPolicyRequest], operations_pb2.Operation]: + r"""Return a callable for the create deploy policy method over gRPC. + + Creates a new DeployPolicy in a given project and + location. + + Returns: + Callable[[~.CreateDeployPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_deploy_policy" not in self._stubs: + self._stubs["create_deploy_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/CreateDeployPolicy", + request_serializer=cloud_deploy.CreateDeployPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_deploy_policy"] + + @property + def update_deploy_policy( + self, + ) -> Callable[[cloud_deploy.UpdateDeployPolicyRequest], operations_pb2.Operation]: + r"""Return a callable for the update deploy policy method over gRPC. + + Updates the parameters of a single DeployPolicy. + + Returns: + Callable[[~.UpdateDeployPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_deploy_policy" not in self._stubs: + self._stubs["update_deploy_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/UpdateDeployPolicy", + request_serializer=cloud_deploy.UpdateDeployPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_deploy_policy"] + + @property + def delete_deploy_policy( + self, + ) -> Callable[[cloud_deploy.DeleteDeployPolicyRequest], operations_pb2.Operation]: + r"""Return a callable for the delete deploy policy method over gRPC. + + Deletes a single DeployPolicy. + + Returns: + Callable[[~.DeleteDeployPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_deploy_policy" not in self._stubs: + self._stubs["delete_deploy_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/DeleteDeployPolicy", + request_serializer=cloud_deploy.DeleteDeployPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_deploy_policy"] + + @property + def list_deploy_policies( + self, + ) -> Callable[ + [cloud_deploy.ListDeployPoliciesRequest], + cloud_deploy.ListDeployPoliciesResponse, + ]: + r"""Return a callable for the list deploy policies method over gRPC. + + Lists DeployPolicies in a given project and location. + + Returns: + Callable[[~.ListDeployPoliciesRequest], + ~.ListDeployPoliciesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_deploy_policies" not in self._stubs: + self._stubs["list_deploy_policies"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/ListDeployPolicies", + request_serializer=cloud_deploy.ListDeployPoliciesRequest.serialize, + response_deserializer=cloud_deploy.ListDeployPoliciesResponse.deserialize, + ) + return self._stubs["list_deploy_policies"] + + @property + def get_deploy_policy( + self, + ) -> Callable[[cloud_deploy.GetDeployPolicyRequest], cloud_deploy.DeployPolicy]: + r"""Return a callable for the get deploy policy method over gRPC. + + Gets details of a single DeployPolicy. + + Returns: + Callable[[~.GetDeployPolicyRequest], + ~.DeployPolicy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_deploy_policy" not in self._stubs: + self._stubs["get_deploy_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/GetDeployPolicy", + request_serializer=cloud_deploy.GetDeployPolicyRequest.serialize, + response_deserializer=cloud_deploy.DeployPolicy.deserialize, + ) + return self._stubs["get_deploy_policy"] + @property def approve_rollout( self, diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/grpc_asyncio.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/grpc_asyncio.py index 62883f74e557..db641300032a 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/grpc_asyncio.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/grpc_asyncio.py @@ -834,6 +834,148 @@ def abandon_release( ) return self._stubs["abandon_release"] + @property + def create_deploy_policy( + self, + ) -> Callable[ + [cloud_deploy.CreateDeployPolicyRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create deploy policy method over gRPC. + + Creates a new DeployPolicy in a given project and + location. + + Returns: + Callable[[~.CreateDeployPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_deploy_policy" not in self._stubs: + self._stubs["create_deploy_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/CreateDeployPolicy", + request_serializer=cloud_deploy.CreateDeployPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_deploy_policy"] + + @property + def update_deploy_policy( + self, + ) -> Callable[ + [cloud_deploy.UpdateDeployPolicyRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update deploy policy method over gRPC. + + Updates the parameters of a single DeployPolicy. + + Returns: + Callable[[~.UpdateDeployPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_deploy_policy" not in self._stubs: + self._stubs["update_deploy_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/UpdateDeployPolicy", + request_serializer=cloud_deploy.UpdateDeployPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_deploy_policy"] + + @property + def delete_deploy_policy( + self, + ) -> Callable[ + [cloud_deploy.DeleteDeployPolicyRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete deploy policy method over gRPC. + + Deletes a single DeployPolicy. + + Returns: + Callable[[~.DeleteDeployPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_deploy_policy" not in self._stubs: + self._stubs["delete_deploy_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/DeleteDeployPolicy", + request_serializer=cloud_deploy.DeleteDeployPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_deploy_policy"] + + @property + def list_deploy_policies( + self, + ) -> Callable[ + [cloud_deploy.ListDeployPoliciesRequest], + Awaitable[cloud_deploy.ListDeployPoliciesResponse], + ]: + r"""Return a callable for the list deploy policies method over gRPC. + + Lists DeployPolicies in a given project and location. + + Returns: + Callable[[~.ListDeployPoliciesRequest], + Awaitable[~.ListDeployPoliciesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_deploy_policies" not in self._stubs: + self._stubs["list_deploy_policies"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/ListDeployPolicies", + request_serializer=cloud_deploy.ListDeployPoliciesRequest.serialize, + response_deserializer=cloud_deploy.ListDeployPoliciesResponse.deserialize, + ) + return self._stubs["list_deploy_policies"] + + @property + def get_deploy_policy( + self, + ) -> Callable[ + [cloud_deploy.GetDeployPolicyRequest], Awaitable[cloud_deploy.DeployPolicy] + ]: + r"""Return a callable for the get deploy policy method over gRPC. + + Gets details of a single DeployPolicy. + + Returns: + Callable[[~.GetDeployPolicyRequest], + Awaitable[~.DeployPolicy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_deploy_policy" not in self._stubs: + self._stubs["get_deploy_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.deploy.v1.CloudDeploy/GetDeployPolicy", + request_serializer=cloud_deploy.GetDeployPolicyRequest.serialize, + response_deserializer=cloud_deploy.DeployPolicy.deserialize, + ) + return self._stubs["get_deploy_policy"] + @property def approve_rollout( self, @@ -1577,6 +1719,49 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.create_deploy_policy: gapic_v1.method_async.wrap_method( + self.create_deploy_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.update_deploy_policy: gapic_v1.method_async.wrap_method( + self.update_deploy_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_deploy_policy: gapic_v1.method_async.wrap_method( + self.delete_deploy_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.list_deploy_policies: gapic_v1.method_async.wrap_method( + self.list_deploy_policies, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_deploy_policy: gapic_v1.method_async.wrap_method( + self.get_deploy_policy, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), self.approve_rollout: gapic_v1.method_async.wrap_method( self.approve_rollout, default_timeout=60.0, diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/rest.py 
b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/rest.py index a96dad3bb982..334a7cca95eb 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/rest.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/rest.py @@ -138,6 +138,14 @@ def post_create_delivery_pipeline(self, response): logging.log(f"Received response: {response}") return response + def pre_create_deploy_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_deploy_policy(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_release(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -186,6 +194,14 @@ def post_delete_delivery_pipeline(self, response): logging.log(f"Received response: {response}") return response + def pre_delete_deploy_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_deploy_policy(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete_target(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -234,6 +250,14 @@ def post_get_delivery_pipeline(self, response): logging.log(f"Received response: {response}") return response + def pre_get_deploy_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_deploy_policy(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_job_run(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -306,6 +330,14 @@ def post_list_delivery_pipelines(self, response): logging.log(f"Received response: {response}") return response + def pre_list_deploy_policies(self, 
request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_deploy_policies(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_job_runs(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -386,6 +418,14 @@ def post_update_delivery_pipeline(self, response): logging.log(f"Received response: {response}") return response + def pre_update_deploy_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_deploy_policy(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_target(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -584,6 +624,29 @@ def post_create_delivery_pipeline( """ return response + def pre_create_deploy_policy( + self, + request: cloud_deploy.CreateDeployPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.CreateDeployPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_deploy_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_create_deploy_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_deploy_policy + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. 
+ """ + return response + def pre_create_release( self, request: cloud_deploy.CreateReleaseRequest, @@ -722,6 +785,29 @@ def post_delete_delivery_pipeline( """ return response + def pre_delete_deploy_policy( + self, + request: cloud_deploy.DeleteDeployPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.DeleteDeployPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_deploy_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_delete_deploy_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_deploy_policy + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + def pre_delete_target( self, request: cloud_deploy.DeleteTargetRequest, @@ -858,6 +944,29 @@ def post_get_delivery_pipeline( """ return response + def pre_get_deploy_policy( + self, + request: cloud_deploy.GetDeployPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.GetDeployPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_deploy_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_get_deploy_policy( + self, response: cloud_deploy.DeployPolicy + ) -> cloud_deploy.DeployPolicy: + """Post-rpc interceptor for get_deploy_policy + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. 
+ """ + return response + def pre_get_job_run( self, request: cloud_deploy.GetJobRunRequest, @@ -1057,6 +1166,29 @@ def post_list_delivery_pipelines( """ return response + def pre_list_deploy_policies( + self, + request: cloud_deploy.ListDeployPoliciesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.ListDeployPoliciesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_deploy_policies + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_list_deploy_policies( + self, response: cloud_deploy.ListDeployPoliciesResponse + ) -> cloud_deploy.ListDeployPoliciesResponse: + """Post-rpc interceptor for list_deploy_policies + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + def pre_list_job_runs( self, request: cloud_deploy.ListJobRunsRequest, @@ -1285,6 +1417,29 @@ def post_update_delivery_pipeline( """ return response + def pre_update_deploy_policy( + self, + request: cloud_deploy.UpdateDeployPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.UpdateDeployPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_deploy_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_update_deploy_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_deploy_policy + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. 
+ """ + return response + def pre_update_target( self, request: cloud_deploy.UpdateTargetRequest, @@ -2413,6 +2568,103 @@ def __call__( resp = self._interceptor.post_create_delivery_pipeline(resp) return resp + class _CreateDeployPolicy(CloudDeployRestStub): + def __hash__(self): + return hash("CreateDeployPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "deployPolicyId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.CreateDeployPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create deploy policy method over HTTP. + + Args: + request (~.cloud_deploy.CreateDeployPolicyRequest): + The request object. The request object for ``CreateDeployPolicy``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/deployPolicies", + "body": "deploy_policy", + }, + ] + request, metadata = self._interceptor.pre_create_deploy_policy( + request, metadata + ) + pb_request = cloud_deploy.CreateDeployPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_deploy_policy(resp) + return resp + class _CreateRelease(CloudDeployRestStub): def __hash__(self): return hash("CreateRelease") @@ -2963,9 +3215,9 @@ def __call__( resp = self._interceptor.post_delete_delivery_pipeline(resp) return resp - class _DeleteTarget(CloudDeployRestStub): + class _DeleteDeployPolicy(CloudDeployRestStub): def __hash__(self): - return hash("DeleteTarget") + return hash("DeleteDeployPolicy") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -2979,17 +3231,17 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: cloud_deploy.DeleteTargetRequest, + request: cloud_deploy.DeleteDeployPolicyRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the delete target method over HTTP. + r"""Call the delete deploy policy method over HTTP. Args: - request (~.cloud_deploy.DeleteTargetRequest): - The request object. The request object for ``DeleteTarget``. + request (~.cloud_deploy.DeleteDeployPolicyRequest): + The request object. The request object for ``DeleteDeployPolicy``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -3007,11 +3259,13 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "delete", - "uri": "/v1/{name=projects/*/locations/*/targets/*}", + "uri": "/v1/{name=projects/*/locations/*/deployPolicies/*}", }, ] - request, metadata = self._interceptor.pre_delete_target(request, metadata) - pb_request = cloud_deploy.DeleteTargetRequest.pb(request) + request, metadata = self._interceptor.pre_delete_deploy_policy( + request, metadata + ) + pb_request = cloud_deploy.DeleteDeployPolicyRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -3046,12 +3300,12 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_target(resp) + resp = self._interceptor.post_delete_deploy_policy(resp) return resp - class _GetAutomation(CloudDeployRestStub): + class _DeleteTarget(CloudDeployRestStub): def __hash__(self): - return hash("GetAutomation") + return hash("DeleteTarget") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -3065,17 +3319,17 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: cloud_deploy.GetAutomationRequest, + request: cloud_deploy.DeleteTargetRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> cloud_deploy.Automation: - r"""Call the get automation method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the delete target method over HTTP. Args: - request (~.cloud_deploy.GetAutomationRequest): - The request object. The request object for ``GetAutomation`` + request (~.cloud_deploy.DeleteTargetRequest): + The request object. The request object for ``DeleteTarget``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -3083,26 +3337,21 @@ def __call__( sent along with the request as metadata. Returns: - ~.cloud_deploy.Automation: - An ``Automation`` resource in the Cloud Deploy API. - - An ``Automation`` enables the automation of manually - driven actions for a Delivery Pipeline, which includes - Release promotion among Targets, Rollout repair and - Rollout deployment strategy advancement. The intention - of Automation is to reduce manual intervention in the - continuous delivery process. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. """ http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v1/{name=projects/*/locations/*/deliveryPipelines/*/automations/*}", + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/targets/*}", }, ] - request, metadata = self._interceptor.pre_get_automation(request, metadata) - pb_request = cloud_deploy.GetAutomationRequest.pb(request) + request, metadata = self._interceptor.pre_delete_target(request, metadata) + pb_request = cloud_deploy.DeleteTargetRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -3135,16 +3384,14 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = cloud_deploy.Automation() - pb_resp = cloud_deploy.Automation.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_automation(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_target(resp) return resp - class _GetAutomationRun(CloudDeployRestStub): + class _GetAutomation(CloudDeployRestStub): def __hash__(self): - return hash("GetAutomationRun") + return hash("GetAutomation") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -3158,7 +3405,100 @@ def 
_get_unset_required_fields(cls, message_dict): def __call__( self, - request: cloud_deploy.GetAutomationRunRequest, + request: cloud_deploy.GetAutomationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_deploy.Automation: + r"""Call the get automation method over HTTP. + + Args: + request (~.cloud_deploy.GetAutomationRequest): + The request object. The request object for ``GetAutomation`` + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_deploy.Automation: + An ``Automation`` resource in the Cloud Deploy API. + + An ``Automation`` enables the automation of manually + driven actions for a Delivery Pipeline, which includes + Release promotion among Targets, Rollout repair and + Rollout deployment strategy advancement. The intention + of Automation is to reduce manual intervention in the + continuous delivery process. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/deliveryPipelines/*/automations/*}", + }, + ] + request, metadata = self._interceptor.pre_get_automation(request, metadata) + pb_request = cloud_deploy.GetAutomationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_deploy.Automation() + pb_resp = cloud_deploy.Automation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_automation(resp) + return resp + + class _GetAutomationRun(CloudDeployRestStub): + def __hash__(self): + return hash("GetAutomationRun") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.GetAutomationRunRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, @@ -3502,6 +3842,97 @@ def __call__( resp = self._interceptor.post_get_delivery_pipeline(resp) return resp + class _GetDeployPolicy(CloudDeployRestStub): + def __hash__(self): + return hash("GetDeployPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.GetDeployPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_deploy.DeployPolicy: + r"""Call the get deploy policy method over HTTP. + + Args: + request (~.cloud_deploy.GetDeployPolicyRequest): + The request object. The request object for ``GetDeployPolicy`` + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.cloud_deploy.DeployPolicy: + A ``DeployPolicy`` resource in the Cloud Deploy API. + + A ``DeployPolicy`` inhibits manual or automation driven + actions within a Delivery Pipeline or Target. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/deployPolicies/*}", + }, + ] + request, metadata = self._interceptor.pre_get_deploy_policy( + request, metadata + ) + pb_request = cloud_deploy.GetDeployPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_deploy.DeployPolicy() + pb_resp = cloud_deploy.DeployPolicy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_deploy_policy(resp) + return resp + class _GetJobRun(CloudDeployRestStub): def __hash__(self): return hash("GetJobRun") @@ -4300,6 +4731,93 @@ def __call__( resp = self._interceptor.post_list_delivery_pipelines(resp) return resp + class _ListDeployPolicies(CloudDeployRestStub): + def __hash__(self): + return hash("ListDeployPolicies") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.ListDeployPoliciesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_deploy.ListDeployPoliciesResponse: + r"""Call the list deploy policies method over HTTP. + + Args: + request (~.cloud_deploy.ListDeployPoliciesRequest): + The request object. The request object for ``ListDeployPolicies``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_deploy.ListDeployPoliciesResponse: + The response object from ``ListDeployPolicies``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/deployPolicies", + }, + ] + request, metadata = self._interceptor.pre_list_deploy_policies( + request, metadata + ) + pb_request = cloud_deploy.ListDeployPoliciesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_deploy.ListDeployPoliciesResponse() + pb_resp = cloud_deploy.ListDeployPoliciesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_deploy_policies(resp) + return resp + class _ListJobRuns(CloudDeployRestStub): def __hash__(self): return hash("ListJobRuns") @@ -5216,6 +5734,103 @@ def __call__( resp = self._interceptor.post_update_delivery_pipeline(resp) return resp + class _UpdateDeployPolicy(CloudDeployRestStub): + def __hash__(self): + return hash("UpdateDeployPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.UpdateDeployPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update deploy policy method over HTTP. + + Args: + request (~.cloud_deploy.UpdateDeployPolicyRequest): + The request object. The request object for ``UpdateDeployPolicy``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{deploy_policy.name=projects/*/locations/*/deployPolicies/*}", + "body": "deploy_policy", + }, + ] + request, metadata = self._interceptor.pre_update_deploy_policy( + request, metadata + ) + pb_request = cloud_deploy.UpdateDeployPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_deploy_policy(resp) + return resp + class _UpdateTarget(CloudDeployRestStub): def __hash__(self): return hash("UpdateTarget") @@ -5390,6 +6005,14 @@ def create_delivery_pipeline( # In C++ this would require a dynamic_cast return self._CreateDeliveryPipeline(self._session, self._host, self._interceptor) # type: ignore + @property + def create_deploy_policy( + self, + ) -> Callable[[cloud_deploy.CreateDeployPolicyRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDeployPolicy(self._session, self._host, self._interceptor) # type: ignore + @property def create_release( self, @@ -5442,6 +6065,14 @@ def delete_delivery_pipeline( # In C++ this would require a dynamic_cast return self._DeleteDeliveryPipeline(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_deploy_policy( + self, + ) -> Callable[[cloud_deploy.DeleteDeployPolicyRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteDeployPolicy(self._session, self._host, self._interceptor) # type: ignore + @property def delete_target( self, @@ -5494,6 +6125,14 @@ def get_delivery_pipeline( # In C++ this would require a dynamic_cast return self._GetDeliveryPipeline(self._session, self._host, self._interceptor) # type: ignore + @property + def get_deploy_policy( + self, + ) -> Callable[[cloud_deploy.GetDeployPolicyRequest], cloud_deploy.DeployPolicy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDeployPolicy(self._session, self._host, self._interceptor) # type: ignore + @property def get_job_run( self, @@ -5577,6 +6216,17 @@ def list_delivery_pipelines( # In C++ this would require a dynamic_cast return self._ListDeliveryPipelines(self._session, self._host, self._interceptor) # type: ignore + @property + def list_deploy_policies( + self, + ) -> Callable[ + [cloud_deploy.ListDeployPoliciesRequest], + cloud_deploy.ListDeployPoliciesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDeployPolicies(self._session, self._host, self._interceptor) # type: ignore + @property def list_job_runs( self, @@ -5669,6 +6319,14 @@ def update_delivery_pipeline( # In C++ this would require a dynamic_cast return self._UpdateDeliveryPipeline(self._session, self._host, self._interceptor) # type: ignore + @property + def update_deploy_policy( + self, + ) -> Callable[[cloud_deploy.UpdateDeployPolicyRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateDeployPolicy(self._session, self._host, self._interceptor) # type: ignore + @property def update_target( self, diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/__init__.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/__init__.py index cb6935f686e4..ff3ffdd4bbbf 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/__init__.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/__init__.py @@ -52,6 +52,7 @@ CreateChildRolloutJobRun, CreateCustomTargetTypeRequest, CreateDeliveryPipelineRequest, + CreateDeployPolicyRequest, CreateReleaseRequest, CreateRolloutRequest, CreateTargetRequest, @@ -65,20 +66,25 @@ DeleteAutomationRequest, DeleteCustomTargetTypeRequest, DeleteDeliveryPipelineRequest, + DeleteDeployPolicyRequest, DeleteTargetRequest, DeliveryPipeline, + DeliveryPipelineAttribute, DeployArtifact, DeployJob, DeployJobRun, DeployJobRunMetadata, DeploymentJobs, DeployParameters, + DeployPolicy, + DeployPolicyResourceSelector, ExecutionConfig, GetAutomationRequest, GetAutomationRunRequest, GetConfigRequest, GetCustomTargetTypeRequest, GetDeliveryPipelineRequest, + GetDeployPolicyRequest, GetJobRunRequest, GetReleaseRequest, GetRolloutRequest, @@ -97,6 +103,8 @@ ListCustomTargetTypesResponse, ListDeliveryPipelinesRequest, ListDeliveryPipelinesResponse, + ListDeployPoliciesRequest, + ListDeployPoliciesResponse, ListJobRunsRequest, ListJobRunsResponse, ListReleasesRequest, @@ -111,6 +119,9 @@ Phase, PipelineCondition, PipelineReadyCondition, + PolicyRule, + PolicyViolation, + PolicyViolationDetails, Postdeploy, PostdeployJob, PostdeployJobRun, @@ -120,6 +131,7 @@ PrivatePool, PromoteReleaseOperation, PromoteReleaseRule, + Range, Release, RenderMetadata, RepairMode, @@ -127,6 +139,7 @@ RepairRolloutOperation, RepairRolloutRule, RepairState, + RestrictRollout, Retry, RetryAttempt, RetryJobRequest, @@ -153,14 +166,19 @@ TargetsTypeCondition, 
TerminateJobRunRequest, TerminateJobRunResponse, + TimeWindow, UpdateAutomationRequest, UpdateCustomTargetTypeRequest, UpdateDeliveryPipelineRequest, + UpdateDeployPolicyRequest, UpdateTargetRequest, VerifyJob, VerifyJobRun, ) +from .customtargettype_notification_payload import CustomTargetTypeNotificationEvent from .deliverypipeline_notification_payload import DeliveryPipelineNotificationEvent +from .deploypolicy_evaluation_payload import DeployPolicyEvaluationEvent +from .deploypolicy_notification_payload import DeployPolicyNotificationEvent from .jobrun_notification_payload import JobRunNotificationEvent from .log_enums import Type from .release_notification_payload import ReleaseNotificationEvent @@ -207,6 +225,7 @@ "CreateChildRolloutJobRun", "CreateCustomTargetTypeRequest", "CreateDeliveryPipelineRequest", + "CreateDeployPolicyRequest", "CreateReleaseRequest", "CreateRolloutRequest", "CreateTargetRequest", @@ -220,20 +239,25 @@ "DeleteAutomationRequest", "DeleteCustomTargetTypeRequest", "DeleteDeliveryPipelineRequest", + "DeleteDeployPolicyRequest", "DeleteTargetRequest", "DeliveryPipeline", + "DeliveryPipelineAttribute", "DeployArtifact", "DeployJob", "DeployJobRun", "DeployJobRunMetadata", "DeploymentJobs", "DeployParameters", + "DeployPolicy", + "DeployPolicyResourceSelector", "ExecutionConfig", "GetAutomationRequest", "GetAutomationRunRequest", "GetConfigRequest", "GetCustomTargetTypeRequest", "GetDeliveryPipelineRequest", + "GetDeployPolicyRequest", "GetJobRunRequest", "GetReleaseRequest", "GetRolloutRequest", @@ -252,6 +276,8 @@ "ListCustomTargetTypesResponse", "ListDeliveryPipelinesRequest", "ListDeliveryPipelinesResponse", + "ListDeployPoliciesRequest", + "ListDeployPoliciesResponse", "ListJobRunsRequest", "ListJobRunsResponse", "ListReleasesRequest", @@ -266,6 +292,9 @@ "Phase", "PipelineCondition", "PipelineReadyCondition", + "PolicyRule", + "PolicyViolation", + "PolicyViolationDetails", "Postdeploy", "PostdeployJob", "PostdeployJobRun", @@ -275,12 
+304,14 @@ "PrivatePool", "PromoteReleaseOperation", "PromoteReleaseRule", + "Range", "Release", "RenderMetadata", "RepairMode", "RepairPhase", "RepairRolloutOperation", "RepairRolloutRule", + "RestrictRollout", "Retry", "RetryAttempt", "RetryJobRequest", @@ -306,16 +337,21 @@ "TargetsTypeCondition", "TerminateJobRunRequest", "TerminateJobRunResponse", + "TimeWindow", "UpdateAutomationRequest", "UpdateCustomTargetTypeRequest", "UpdateDeliveryPipelineRequest", + "UpdateDeployPolicyRequest", "UpdateTargetRequest", "VerifyJob", "VerifyJobRun", "BackoffMode", "RepairState", "SkaffoldSupportState", + "CustomTargetTypeNotificationEvent", "DeliveryPipelineNotificationEvent", + "DeployPolicyEvaluationEvent", + "DeployPolicyNotificationEvent", "JobRunNotificationEvent", "Type", "ReleaseNotificationEvent", diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py index 4379e11d05c5..e8745500bbba 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py @@ -21,6 +21,8 @@ from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.type import date_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore import proto # type: ignore __protobuf__ = proto.module( @@ -80,8 +82,23 @@ "CreateCustomTargetTypeRequest", "UpdateCustomTargetTypeRequest", "DeleteCustomTargetTypeRequest", + "DeployPolicy", + "DeployPolicyResourceSelector", + "DeliveryPipelineAttribute", "TargetAttribute", + "PolicyRule", + "RestrictRollout", + "TimeWindow", + "Range", + "PolicyViolation", + "PolicyViolationDetails", "Release", + "CreateDeployPolicyRequest", + "UpdateDeployPolicyRequest", + "DeleteDeployPolicyRequest", + "ListDeployPoliciesRequest", + 
"ListDeployPoliciesResponse", + "GetDeployPolicyRequest", "BuildArtifact", "TargetArtifact", "DeployArtifact", @@ -602,7 +619,9 @@ class CanaryDeployment(proto.Message): percentages (MutableSequence[int]): Required. The percentage based deployments that will occur as a part of a ``Rollout``. List is expected in ascending - order and each integer n is 0 <= n < 100. + order and each integer n is 0 <= n < 100. If the + GatewayServiceMesh is configured for Kubernetes, then the + range for n is 0 <= n <= 100. verify (bool): Whether to run verify tests after each percentage deployment. @@ -1130,7 +1149,7 @@ class CreateDeliveryPipelineRequest(proto.Message): Attributes: parent (str): Required. The parent collection in which the - ``DeliveryPipeline`` should be created. Format should be + ``DeliveryPipeline`` must be created. The format is ``projects/{project_id}/locations/{location_name}``. delivery_pipeline_id (str): Required. ID of the ``DeliveryPipeline``. @@ -1191,11 +1210,11 @@ class UpdateDeliveryPipelineRequest(proto.Message): Attributes: update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Field mask is used to specify the fields to be - overwritten in the ``DeliveryPipeline`` resource by the - update. The fields specified in the update_mask are relative - to the resource, not the full request. A field will be - overwritten if it's in the mask. If the user doesn't provide - a mask then all fields are overwritten. + overwritten by the update in the ``DeliveryPipeline`` + resource. The fields specified in the update_mask are + relative to the resource, not the full request. A field will + be overwritten if it's in the mask. If the user doesn't + provide a mask then all fields are overwritten. delivery_pipeline (google.cloud.deploy_v1.types.DeliveryPipeline): Required. The ``DeliveryPipeline`` to update. request_id (str): @@ -1258,7 +1277,7 @@ class DeleteDeliveryPipelineRequest(proto.Message): Attributes: name (str): Required. 
The name of the ``DeliveryPipeline`` to delete. - Format should be + The format is ``projects/{project_id}/locations/{location_name}/deliveryPipelines/{pipeline_name}``. request_id (str): Optional. A request ID to identify requests. @@ -1353,7 +1372,7 @@ class RollbackTargetRequest(proto.Message): Attributes: name (str): Required. The ``DeliveryPipeline`` for which the rollback - ``Rollout`` should be created. Format should be + ``Rollout`` must be created. The format is ``projects/{project_id}/locations/{location_name}/deliveryPipelines/{pipeline_name}``. target_id (str): Required. ID of the ``Target`` that is being rolled back. @@ -1371,6 +1390,9 @@ class RollbackTargetRequest(proto.Message): validate_only (bool): Optional. If set to true, the request is validated and the user is provided with a ``RollbackTargetResponse``. + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deploy_policy}``. """ name: str = proto.Field( @@ -1402,6 +1424,10 @@ class RollbackTargetRequest(proto.Message): proto.BOOL, number=7, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=9, + ) class RollbackTargetResponse(proto.Message): @@ -1802,6 +1828,10 @@ class GkeCluster(proto.Message): Only specify this option when ``cluster`` is a `private GKE cluster `__. + proxy_url (str): + Optional. If set, used to configure a + `proxy `__ + to the Kubernetes server. """ cluster: str = proto.Field( @@ -1812,6 +1842,10 @@ class GkeCluster(proto.Message): proto.BOOL, number=2, ) + proxy_url: str = proto.Field( + proto.STRING, + number=3, + ) class AnthosCluster(proto.Message): @@ -1979,8 +2013,8 @@ class CreateTargetRequest(proto.Message): Attributes: parent (str): - Required. The parent collection in which the ``Target`` - should be created. Format should be + Required. The parent collection in which the ``Target`` must + be created. 
The format is ``projects/{project_id}/locations/{location_name}``. target_id (str): Required. ID of the ``Target``. @@ -2041,11 +2075,11 @@ class UpdateTargetRequest(proto.Message): Attributes: update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Field mask is used to specify the fields to be - overwritten in the Target resource by the update. The fields - specified in the update_mask are relative to the resource, - not the full request. A field will be overwritten if it's in - the mask. If the user doesn't provide a mask then all fields - are overwritten. + overwritten by the update in the ``Target`` resource. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it's in the mask. If the user doesn't provide a mask then + all fields are overwritten. target (google.cloud.deploy_v1.types.Target): Required. The ``Target`` to update. request_id (str): @@ -2106,8 +2140,8 @@ class DeleteTargetRequest(proto.Message): Attributes: name (str): - Required. The name of the ``Target`` to delete. Format - should be + Required. The name of the ``Target`` to delete. The format + is ``projects/{project_id}/locations/{location_name}/targets/{target_name}``. request_id (str): Optional. A request ID to identify requests. @@ -2551,7 +2585,7 @@ class CreateCustomTargetTypeRequest(proto.Message): Attributes: parent (str): Required. The parent collection in which the - ``CustomTargetType`` should be created. Format should be + ``CustomTargetType`` must be created. The format is ``projects/{project_id}/locations/{location_name}``. custom_target_type_id (str): Required. ID of the ``CustomTargetType``. @@ -2612,11 +2646,11 @@ class UpdateCustomTargetTypeRequest(proto.Message): Attributes: update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Field mask is used to specify the fields to be - overwritten in the ``CustomTargetType`` resource by the - update. 
The fields specified in the update_mask are relative - to the resource, not the full request. A field will be - overwritten if it's in the mask. If the user doesn't provide - a mask then all fields are overwritten. + overwritten by the update in the ``CustomTargetType`` + resource. The fields specified in the update_mask are + relative to the resource, not the full request. A field will + be overwritten if it's in the mask. If the user doesn't + provide a mask then all fields are overwritten. custom_target_type (google.cloud.deploy_v1.types.CustomTargetType): Required. The ``CustomTargetType`` to update. request_id (str): @@ -2737,6 +2771,210 @@ class DeleteCustomTargetTypeRequest(proto.Message): ) +class DeployPolicy(proto.Message): + r"""A ``DeployPolicy`` resource in the Cloud Deploy API. + + A ``DeployPolicy`` inhibits manual or automation driven actions + within a Delivery Pipeline or Target. + + Attributes: + name (str): + Output only. Name of the ``DeployPolicy``. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. + The ``deployPolicy`` component must match + ``[a-z]([a-z0-9-]{0,61}[a-z0-9])?`` + uid (str): + Output only. Unique identifier of the ``DeployPolicy``. + description (str): + Description of the ``DeployPolicy``. Max length is 255 + characters. + annotations (MutableMapping[str, str]): + User annotations. These attributes can only be set and used + by the user, and not by Cloud Deploy. Annotations must meet + the following constraints: + + - Annotations are key/value pairs. + - Valid annotation keys have two segments: an optional + prefix and name, separated by a slash (``/``). + - The name segment is required and must be 63 characters or + less, beginning and ending with an alphanumeric character + (``[a-z0-9A-Z]``) with dashes (``-``), underscores + (``_``), dots (``.``), and alphanumerics between. + - The prefix is optional. 
If specified, the prefix must be + a DNS subdomain: a series of DNS labels separated by + dots(\ ``.``), not longer than 253 characters in total, + followed by a slash (``/``). + + See + https://kubernetes.io/docs/concepts/overview/working-with-objects/annotations/#syntax-and-character-set + for more details. + labels (MutableMapping[str, str]): + Labels are attributes that can be set and used by both the + user and by Cloud Deploy. Labels must meet the following + constraints: + + - Keys and values can contain only lowercase letters, + numeric characters, underscores, and dashes. + - All characters must use UTF-8 encoding, and international + characters are allowed. + - Keys must start with a lowercase letter or international + character. + - Each resource is limited to a maximum of 64 labels. + + Both keys and values are additionally constrained to be <= + 128 bytes. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time at which the deploy policy + was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Most recent time at which the + deploy policy was updated. + suspended (bool): + When suspended, the policy will not prevent + actions from occurring, even if the action + violates the policy. + selectors (MutableSequence[google.cloud.deploy_v1.types.DeployPolicyResourceSelector]): + Required. Selected resources to which the + policy will be applied. At least one selector is + required. If one selector matches the resource + the policy applies. For example, if there are + two selectors and the action being attempted + matches one of them, the policy will apply to + that action. + rules (MutableSequence[google.cloud.deploy_v1.types.PolicyRule]): + Required. Rules to apply. At least one rule + must be present. + etag (str): + The weak etag of the ``Automation`` resource. 
This checksum + is computed by the server based on the value of other + fields, and may be sent on update and delete requests to + ensure the client has an up-to-date value before proceeding. + """ + + class Invoker(proto.Enum): + r"""What invoked the action. Filters enforcing the policy + depending on what invoked the action. + + Values: + INVOKER_UNSPECIFIED (0): + Unspecified. + USER (1): + The action is user-driven. For example, + creating a rollout manually via a gcloud create + command. + DEPLOY_AUTOMATION (2): + Automated action by Cloud Deploy. + """ + INVOKER_UNSPECIFIED = 0 + USER = 1 + DEPLOY_AUTOMATION = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + suspended: bool = proto.Field( + proto.BOOL, + number=8, + ) + selectors: MutableSequence["DeployPolicyResourceSelector"] = proto.RepeatedField( + proto.MESSAGE, + number=12, + message="DeployPolicyResourceSelector", + ) + rules: MutableSequence["PolicyRule"] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message="PolicyRule", + ) + etag: str = proto.Field( + proto.STRING, + number=11, + ) + + +class DeployPolicyResourceSelector(proto.Message): + r"""Contains information on the resources to select for a deploy + policy. Attributes provided must all match the resource in order + for policy restrictions to apply. 
For example, if delivery + pipelines attributes given are an id "prod" and labels "foo: + bar", a delivery pipeline resource must match both that id and + have that label in order to be subject to the policy. + + Attributes: + delivery_pipeline (google.cloud.deploy_v1.types.DeliveryPipelineAttribute): + Optional. Contains attributes about a + delivery pipeline. + target (google.cloud.deploy_v1.types.TargetAttribute): + Optional. Contains attributes about a target. + """ + + delivery_pipeline: "DeliveryPipelineAttribute" = proto.Field( + proto.MESSAGE, + number=1, + message="DeliveryPipelineAttribute", + ) + target: "TargetAttribute" = proto.Field( + proto.MESSAGE, + number=2, + message="TargetAttribute", + ) + + +class DeliveryPipelineAttribute(proto.Message): + r"""Contains criteria for selecting DeliveryPipelines. + + Attributes: + id (str): + ID of the ``DeliveryPipeline``. The value of this field + could be one of the following: + + - The last segment of a pipeline name. It only needs the ID + to determine which pipeline is being referred to + - "*", all delivery pipelines in a location. + labels (MutableMapping[str, str]): + DeliveryPipeline labels. + """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + + class TargetAttribute(proto.Message): r"""Contains criteria for selecting Targets. @@ -2763,6 +3001,217 @@ class TargetAttribute(proto.Message): ) +class PolicyRule(proto.Message): + r"""Rule to apply. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + restrict_rollouts (google.cloud.deploy_v1.types.RestrictRollout): + Rollout restrictions. + + This field is a member of `oneof`_ ``rule``. 
+ """ + + restrict_rollouts: "RestrictRollout" = proto.Field( + proto.MESSAGE, + number=1, + oneof="rule", + message="RestrictRollout", + ) + + +class RestrictRollout(proto.Message): + r"""Rollout restrictions. + + Attributes: + id (str): + Optional. Restriction rule ID. Required and must be unique + within a DeployPolicy. The format is + ``[a-z]([a-z0-9-]{0,61}[a-z0-9])?``. + invokers (MutableSequence[google.cloud.deploy_v1.types.DeployPolicy.Invoker]): + Optional. What invoked the action. If left + empty, all invoker types will be restricted. + actions (MutableSequence[google.cloud.deploy_v1.types.RestrictRollout.Actions]): + Rollout actions to be restricted as part of + the policy. If left empty, all actions will be + restricted. + time_window (google.cloud.deploy_v1.types.TimeWindow): + Required. Time window within which actions + are restricted. + """ + + class Actions(proto.Enum): + r"""Rollout actions to be restricted as part of the policy. + + Values: + ACTIONS_UNSPECIFIED (0): + Unspecified. + ADVANCE (1): + Advance the rollout to the next phase. + APPROVE (2): + Approve the rollout. + CANCEL (3): + Cancel the rollout. + CREATE (4): + Create a rollout. + IGNORE_JOB (5): + Ignore a job result on the rollout. + RETRY_JOB (6): + Retry a job for a rollout. + ROLLBACK (7): + Rollback a rollout. + TERMINATE_JOBRUN (8): + Terminate a jobrun. 
+ """ + ACTIONS_UNSPECIFIED = 0 + ADVANCE = 1 + APPROVE = 2 + CANCEL = 3 + CREATE = 4 + IGNORE_JOB = 5 + RETRY_JOB = 6 + ROLLBACK = 7 + TERMINATE_JOBRUN = 8 + + id: str = proto.Field( + proto.STRING, + number=5, + ) + invokers: MutableSequence["DeployPolicy.Invoker"] = proto.RepeatedField( + proto.ENUM, + number=6, + enum="DeployPolicy.Invoker", + ) + actions: MutableSequence[Actions] = proto.RepeatedField( + proto.ENUM, + number=3, + enum=Actions, + ) + time_window: "TimeWindow" = proto.Field( + proto.MESSAGE, + number=4, + message="TimeWindow", + ) + + +class TimeWindow(proto.Message): + r"""Time window within which actions are restricted. + + Attributes: + time_zone (str): + Required. The time zone in IANA format `IANA Time Zone + Database `__ (e.g. + America/New_York). + ranges (MutableSequence[google.cloud.deploy_v1.types.Range]): + Required. Range within which actions are + restricted. + """ + + time_zone: str = proto.Field( + proto.STRING, + number=1, + ) + ranges: MutableSequence["Range"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Range", + ) + + +class Range(proto.Message): + r"""Range within which actions are restricted. + + Attributes: + start_date (google.type.date_pb2.Date): + Start date. + end_date (google.type.date_pb2.Date): + End date. + start_time_of_day (google.type.timeofday_pb2.TimeOfDay): + Start time of day. + end_time_of_day (google.type.timeofday_pb2.TimeOfDay): + End time of day. + day_of_week (MutableSequence[google.type.dayofweek_pb2.DayOfWeek]): + Days of week. 
+ """ + + start_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=1, + message=date_pb2.Date, + ) + end_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=2, + message=date_pb2.Date, + ) + start_time_of_day: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=3, + message=timeofday_pb2.TimeOfDay, + ) + end_time_of_day: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=4, + message=timeofday_pb2.TimeOfDay, + ) + day_of_week: MutableSequence[dayofweek_pb2.DayOfWeek] = proto.RepeatedField( + proto.ENUM, + number=5, + enum=dayofweek_pb2.DayOfWeek, + ) + + +class PolicyViolation(proto.Message): + r"""Returned from an action if one or more policies were + violated, and therefore the action was prevented. Contains + information about what policies were violated and why. + + Attributes: + policy_violation_details (MutableSequence[google.cloud.deploy_v1.types.PolicyViolationDetails]): + Policy violation details. + """ + + policy_violation_details: MutableSequence[ + "PolicyViolationDetails" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="PolicyViolationDetails", + ) + + +class PolicyViolationDetails(proto.Message): + r"""Policy violation details. + + Attributes: + policy (str): + Name of the policy that was violated. Policy resource will + be in the format of + ``projects/{project}/locations/{location}/policies/{policy}``. + rule_id (str): + Id of the rule that triggered the policy + violation. + failure_message (str): + User readable message about why the request + violated a policy. This is not intended for + machine parsing. + """ + + policy: str = proto.Field( + proto.STRING, + number=1, + ) + rule_id: str = proto.Field( + proto.STRING, + number=2, + ) + failure_message: str = proto.Field( + proto.STRING, + number=3, + ) + + class Release(proto.Message): r"""A ``Release`` resource in the Cloud Deploy API. @@ -3061,124 +3510,417 @@ class ReleaseCondition(proto.Message): release's Skaffold version. 
""" - release_ready_condition: "Release.ReleaseReadyCondition" = proto.Field( - proto.MESSAGE, - number=1, - message="Release.ReleaseReadyCondition", - ) - skaffold_supported_condition: "Release.SkaffoldSupportedCondition" = ( - proto.Field( - proto.MESSAGE, - number=2, - message="Release.SkaffoldSupportedCondition", - ) - ) + release_ready_condition: "Release.ReleaseReadyCondition" = proto.Field( + proto.MESSAGE, + number=1, + message="Release.ReleaseReadyCondition", + ) + skaffold_supported_condition: "Release.SkaffoldSupportedCondition" = ( + proto.Field( + proto.MESSAGE, + number=2, + message="Release.SkaffoldSupportedCondition", + ) + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + abandoned: bool = proto.Field( + proto.BOOL, + number=23, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + render_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + render_end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + skaffold_config_uri: str = proto.Field( + proto.STRING, + number=17, + ) + skaffold_config_path: str = proto.Field( + proto.STRING, + number=9, + ) + build_artifacts: MutableSequence["BuildArtifact"] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message="BuildArtifact", + ) + delivery_pipeline_snapshot: "DeliveryPipeline" = proto.Field( + proto.MESSAGE, + number=11, + message="DeliveryPipeline", + ) + target_snapshots: MutableSequence["Target"] = proto.RepeatedField( + proto.MESSAGE, + number=12, + 
message="Target", + ) + custom_target_type_snapshots: MutableSequence[ + "CustomTargetType" + ] = proto.RepeatedField( + proto.MESSAGE, + number=27, + message="CustomTargetType", + ) + render_state: RenderState = proto.Field( + proto.ENUM, + number=13, + enum=RenderState, + ) + etag: str = proto.Field( + proto.STRING, + number=16, + ) + skaffold_version: str = proto.Field( + proto.STRING, + number=19, + ) + target_artifacts: MutableMapping[str, "TargetArtifact"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=20, + message="TargetArtifact", + ) + target_renders: MutableMapping[str, TargetRender] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=22, + message=TargetRender, + ) + condition: ReleaseCondition = proto.Field( + proto.MESSAGE, + number=24, + message=ReleaseCondition, + ) + deploy_parameters: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=25, + ) + + +class CreateDeployPolicyRequest(proto.Message): + r"""The request object for ``CreateDeployPolicy``. + + Attributes: + parent (str): + Required. The parent collection in which the + ``DeployPolicy`` must be created. The format is + ``projects/{project_id}/locations/{location_name}``. + deploy_policy_id (str): + Required. ID of the ``DeployPolicy``. + deploy_policy (google.cloud.deploy_v1.types.DeployPolicy): + Required. The ``DeployPolicy`` to create. + request_id (str): + Optional. A request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server knows to ignore + the request if it has already been completed. + The server guarantees that for at least 60 + minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. 
This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set to true, the request is + validated and the user is provided with an + expected result, but no actual change is made. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + deploy_policy_id: str = proto.Field( + proto.STRING, + number=2, + ) + deploy_policy: "DeployPolicy" = proto.Field( + proto.MESSAGE, + number=3, + message="DeployPolicy", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class UpdateDeployPolicyRequest(proto.Message): + r"""The request object for ``UpdateDeployPolicy``. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten by the update in the ``DeployPolicy`` resource. + The fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it's in the mask. If the user doesn't provide a mask then + all fields are overwritten. + deploy_policy (google.cloud.deploy_v1.types.DeployPolicy): + Required. The ``DeployPolicy`` to update. + request_id (str): + Optional. A request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server knows to ignore + the request if it has already been completed. + The server guarantees that for at least 60 + minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. 
This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + allow_missing (bool): + Optional. If set to true, updating a ``DeployPolicy`` that + does not exist will result in the creation of a new + ``DeployPolicy``. + validate_only (bool): + Optional. If set to true, the request is + validated and the user is provided with an + expected result, but no actual change is made. + """ - name: str = proto.Field( - proto.STRING, + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, number=1, + message=field_mask_pb2.FieldMask, ) - uid: str = proto.Field( - proto.STRING, + deploy_policy: "DeployPolicy" = proto.Field( + proto.MESSAGE, number=2, + message="DeployPolicy", ) - description: str = proto.Field( + request_id: str = proto.Field( proto.STRING, number=3, ) - annotations: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, + allow_missing: bool = proto.Field( + proto.BOOL, number=4, ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=5, - ) - abandoned: bool = proto.Field( + validate_only: bool = proto.Field( proto.BOOL, - number=23, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - render_start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - render_end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, + number=5, ) - skaffold_config_uri: str = proto.Field( + + +class DeleteDeployPolicyRequest(proto.Message): + r"""The request object for ``DeleteDeployPolicy``. + + Attributes: + name (str): + Required. The name of the ``DeployPolicy`` to delete. 
The + format is + ``projects/{project_id}/locations/{location_name}/deployPolicies/{deploy_policy_name}``. + request_id (str): + Optional. A request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server knows to ignore + the request if it has already been completed. + The server guarantees that for at least 60 + minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + allow_missing (bool): + Optional. If set to true, then deleting an already deleted + or non-existing ``DeployPolicy`` will succeed. + validate_only (bool): + Optional. If set, validate the request and + preview the review, but do not actually post it. + etag (str): + Optional. This checksum is computed by the + server based on the value of other fields, and + may be sent on update and delete requests to + ensure the client has an up-to-date value before + proceeding. 
+ """ + + name: str = proto.Field( proto.STRING, - number=17, + number=1, ) - skaffold_config_path: str = proto.Field( + request_id: str = proto.Field( proto.STRING, - number=9, + number=2, ) - build_artifacts: MutableSequence["BuildArtifact"] = proto.RepeatedField( - proto.MESSAGE, - number=10, - message="BuildArtifact", + allow_missing: bool = proto.Field( + proto.BOOL, + number=3, ) - delivery_pipeline_snapshot: "DeliveryPipeline" = proto.Field( - proto.MESSAGE, - number=11, - message="DeliveryPipeline", + validate_only: bool = proto.Field( + proto.BOOL, + number=4, ) - target_snapshots: MutableSequence["Target"] = proto.RepeatedField( - proto.MESSAGE, - number=12, - message="Target", + etag: str = proto.Field( + proto.STRING, + number=5, ) - custom_target_type_snapshots: MutableSequence[ - "CustomTargetType" - ] = proto.RepeatedField( - proto.MESSAGE, - number=27, - message="CustomTargetType", + + +class ListDeployPoliciesRequest(proto.Message): + r"""The request object for ``ListDeployPolicies``. + + Attributes: + parent (str): + Required. The parent, which owns this collection of deploy + policies. Format must be + ``projects/{project_id}/locations/{location_name}``. + page_size (int): + The maximum number of deploy policies to + return. The service may return fewer than this + value. If unspecified, at most 50 deploy + policies will be returned. The maximum value is + 1000; values above 1000 will be set to 1000. + page_token (str): + A page token, received from a previous + ``ListDeployPolicies`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other provided parameters match the + call that provided the page token. + filter (str): + Filter deploy policies to be returned. See + https://google.aip.dev/160 for more details. All + fields can be used in the filter. + order_by (str): + Field to sort by. See + https://google.aip.dev/132#ordering for more + details. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, ) - render_state: RenderState = proto.Field( - proto.ENUM, - number=13, - enum=RenderState, + page_size: int = proto.Field( + proto.INT32, + number=2, ) - etag: str = proto.Field( + page_token: str = proto.Field( proto.STRING, - number=16, + number=3, ) - skaffold_version: str = proto.Field( + filter: str = proto.Field( proto.STRING, - number=19, + number=4, ) - target_artifacts: MutableMapping[str, "TargetArtifact"] = proto.MapField( + order_by: str = proto.Field( proto.STRING, - proto.MESSAGE, - number=20, - message="TargetArtifact", + number=5, ) - target_renders: MutableMapping[str, TargetRender] = proto.MapField( - proto.STRING, + + +class ListDeployPoliciesResponse(proto.Message): + r"""The response object from ``ListDeployPolicies``. + + Attributes: + deploy_policies (MutableSequence[google.cloud.deploy_v1.types.DeployPolicy]): + The ``DeployPolicy`` objects. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + deploy_policies: MutableSequence["DeployPolicy"] = proto.RepeatedField( proto.MESSAGE, - number=22, - message=TargetRender, + number=1, + message="DeployPolicy", ) - condition: ReleaseCondition = proto.Field( - proto.MESSAGE, - number=24, - message=ReleaseCondition, + next_page_token: str = proto.Field( + proto.STRING, + number=2, ) - deploy_parameters: MutableMapping[str, str] = proto.MapField( + unreachable: MutableSequence[str] = proto.RepeatedField( proto.STRING, + number=3, + ) + + +class GetDeployPolicyRequest(proto.Message): + r"""The request object for ``GetDeployPolicy`` + + Attributes: + name (str): + Required. Name of the ``DeployPolicy``. 
Format must be + ``projects/{project_id}/locations/{location_name}/deployPolicies/{deploy_policy_name}``. + """ + + name: str = proto.Field( proto.STRING, - number=25, + number=1, ) @@ -3450,8 +4192,8 @@ class CreateReleaseRequest(proto.Message): Attributes: parent (str): - Required. The parent collection in which the ``Release`` - should be created. Format should be + Required. The parent collection in which the ``Release`` is + created. The format is ``projects/{project_id}/locations/{location_name}/deliveryPipelines/{pipeline_name}``. release_id (str): Required. ID of the ``Release``. @@ -3481,6 +4223,9 @@ class CreateReleaseRequest(proto.Message): Optional. If set to true, the request is validated and the user is provided with an expected result, but no actual change is made. + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. """ parent: str = proto.Field( @@ -3504,6 +4249,10 @@ class CreateReleaseRequest(proto.Message): proto.BOOL, number=5, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) class Rollout(proto.Message): @@ -3928,13 +4677,13 @@ class AutomationRolloutMetadata(proto.Message): Attributes: promote_automation_run (str): - Output only. The ID of the AutomationRun + Output only. The name of the AutomationRun initiated by a promote release rule. advance_automation_runs (MutableSequence[str]): - Output only. The IDs of the AutomationRuns + Output only. The names of the AutomationRuns initiated by an advance rollout rule. repair_automation_runs (MutableSequence[str]): - Output only. The IDs of the AutomationRuns + Output only. The names of the AutomationRuns initiated by a repair rollout rule. """ @@ -4404,7 +5153,7 @@ class CreateRolloutRequest(proto.Message): Attributes: parent (str): Required. The parent collection in which the ``Rollout`` - should be created. 
Format should be + must be created. The format is ``projects/{project_id}/locations/{location_name}/deliveryPipelines/{pipeline_name}/releases/{release_name}``. rollout_id (str): Required. ID of the ``Rollout``. @@ -4434,6 +5183,9 @@ class CreateRolloutRequest(proto.Message): Optional. If set to true, the request is validated and the user is provided with an expected result, but no actual change is made. + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. starting_phase_id (str): Optional. The starting phase ID for the ``Rollout``. If empty the ``Rollout`` will start at the first phase. @@ -4460,6 +5212,10 @@ class CreateRolloutRequest(proto.Message): proto.BOOL, number=5, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) starting_phase_id: str = proto.Field( proto.STRING, number=7, @@ -4537,6 +5293,9 @@ class ApproveRolloutRequest(proto.Message): ``projects/{project}/locations/{location}/deliveryPipelines/{deliveryPipeline}/releases/{release}/rollouts/{rollout}``. approved (bool): Required. True = approve; false = reject + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. """ name: str = proto.Field( @@ -4547,6 +5306,10 @@ class ApproveRolloutRequest(proto.Message): proto.BOOL, number=2, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) class ApproveRolloutResponse(proto.Message): @@ -4562,6 +5325,9 @@ class AdvanceRolloutRequest(proto.Message): ``projects/{project}/locations/{location}/deliveryPipelines/{deliveryPipeline}/releases/{release}/rollouts/{rollout}``. phase_id (str): Required. The phase ID to advance the ``Rollout`` to. + override_deploy_policy (MutableSequence[str]): + Optional. 
Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. """ name: str = proto.Field( @@ -4572,6 +5338,10 @@ class AdvanceRolloutRequest(proto.Message): proto.STRING, number=2, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) class AdvanceRolloutResponse(proto.Message): @@ -4585,12 +5355,19 @@ class CancelRolloutRequest(proto.Message): name (str): Required. Name of the Rollout. Format is ``projects/{project}/locations/{location}/deliveryPipelines/{deliveryPipeline}/releases/{release}/rollouts/{rollout}``. + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. """ name: str = proto.Field( proto.STRING, number=1, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) class CancelRolloutResponse(proto.Message): @@ -4609,6 +5386,9 @@ class IgnoreJobRequest(proto.Message): belongs to. job_id (str): Required. The job ID for the Job to ignore. + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. """ rollout: str = proto.Field( @@ -4623,6 +5403,10 @@ class IgnoreJobRequest(proto.Message): proto.STRING, number=3, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) class IgnoreJobResponse(proto.Message): @@ -4641,6 +5425,9 @@ class RetryJobRequest(proto.Message): belongs to. job_id (str): Required. The job ID for the Job to retry. + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. 
""" rollout: str = proto.Field( @@ -4655,6 +5442,10 @@ class RetryJobRequest(proto.Message): proto.STRING, number=3, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) class RetryJobResponse(proto.Message): @@ -5287,12 +6078,19 @@ class TerminateJobRunRequest(proto.Message): name (str): Required. Name of the ``JobRun``. Format must be ``projects/{project}/locations/{location}/deliveryPipelines/{deliveryPipeline}/releases/{release}/rollouts/{rollout}/jobRuns/{jobRun}``. + override_deploy_policy (MutableSequence[str]): + Optional. Deploy policies to override. Format is + ``projects/{project}/locations/{location}/deployPolicies/{deployPolicy}``. """ name: str = proto.Field( proto.STRING, number=1, ) + override_deploy_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) class TerminateJobRunResponse(proto.Message): @@ -5585,8 +6383,8 @@ class AutomationRule(proto.Message): class PromoteReleaseRule(proto.Message): - r"""``PromoteRelease`` rule will automatically promote a release from - the current target to a specified target. + r"""The ``PromoteRelease`` rule will automatically promote a release + from the current target to a specified target. Attributes: id (str): @@ -5849,7 +6647,7 @@ class CreateAutomationRequest(proto.Message): Attributes: parent (str): Required. The parent collection in which the ``Automation`` - should be created. Format should be + must be created. The format is ``projects/{project_id}/locations/{location_name}/deliveryPipelines/{pipeline_name}``. automation_id (str): Required. ID of the ``Automation``. @@ -5910,7 +6708,7 @@ class UpdateAutomationRequest(proto.Message): Attributes: update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Field mask is used to specify the fields to be - overwritten in the ``Automation`` resource by the update. + overwritten by the update in the ``Automation`` resource. 
The fields specified in the update_mask are relative to the resource, not the full request. A field will be overwritten if it's in the mask. If the user doesn't provide a mask then @@ -5976,8 +6774,8 @@ class DeleteAutomationRequest(proto.Message): Attributes: name (str): - Required. The name of the ``Automation`` to delete. Format - should be + Required. The name of the ``Automation`` to delete. The + format is ``projects/{project_id}/locations/{location_name}/deliveryPipelines/{pipeline_name}/automations/{automation_name}``. request_id (str): Optional. A request ID to identify requests. @@ -6179,6 +6977,9 @@ class AutomationRun(proto.Message): Output only. Explains the current state of the ``AutomationRun``. Present only when an explanation is needed. + policy_violation (google.cloud.deploy_v1.types.PolicyViolation): + Output only. Contains information about what policies + prevented the ``AutomationRun`` to proceed. expire_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Time the ``AutomationRun`` expires. An ``AutomationRun`` expires after 14 days from its creation @@ -6225,6 +7026,8 @@ class State(proto.Enum): The ``AutomationRun`` is in progress. PENDING (5): The ``AutomationRun`` is pending. + ABORTED (6): + The ``AutomationRun`` was aborted. """ STATE_UNSPECIFIED = 0 SUCCEEDED = 1 @@ -6232,6 +7035,7 @@ class State(proto.Enum): FAILED = 3 IN_PROGRESS = 4 PENDING = 5 + ABORTED = 6 name: str = proto.Field( proto.STRING, @@ -6273,6 +7077,11 @@ class State(proto.Enum): proto.STRING, number=9, ) + policy_violation: "PolicyViolation" = proto.Field( + proto.MESSAGE, + number=10, + message="PolicyViolation", + ) expire_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=11, @@ -6423,7 +7232,7 @@ class RepairRolloutOperation(proto.Message): class RepairPhase(proto.Message): r"""RepairPhase tracks the repair attempts that have been made for each - ``RepairMode`` specified in the ``Automation`` resource. 
+ ``RepairPhaseConfig`` specified in the ``Automation`` resource. This message has `oneof`_ fields (mutually exclusive fields). For each oneof, at most one member field can be set at the same time. diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/customtargettype_notification_payload.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/customtargettype_notification_payload.py new file mode 100644 index 000000000000..07a38daf2b98 --- /dev/null +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/customtargettype_notification_payload.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.deploy_v1.types import log_enums + +__protobuf__ = proto.module( + package="google.cloud.deploy.v1", + manifest={ + "CustomTargetTypeNotificationEvent", + }, +) + + +class CustomTargetTypeNotificationEvent(proto.Message): + r"""Payload proto for + "clouddeploy.googleapis.com/customtargettype_notification" Platform + Log event that describes the failure to send a custom target type + status change Pub/Sub notification. + + Attributes: + message (str): + Debug message for when a notification fails + to send. + custom_target_type_uid (str): + Unique identifier of the ``CustomTargetType``. 
+ custom_target_type (str): + The name of the ``CustomTargetType``. + type_ (google.cloud.deploy_v1.types.Type): + Type of this notification, e.g. for a Pub/Sub + failure. + """ + + message: str = proto.Field( + proto.STRING, + number=1, + ) + custom_target_type_uid: str = proto.Field( + proto.STRING, + number=4, + ) + custom_target_type: str = proto.Field( + proto.STRING, + number=2, + ) + type_: log_enums.Type = proto.Field( + proto.ENUM, + number=3, + enum=log_enums.Type, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/deploypolicy_evaluation_payload.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/deploypolicy_evaluation_payload.py new file mode 100644 index 000000000000..b10c5f6736fd --- /dev/null +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/deploypolicy_evaluation_payload.py @@ -0,0 +1,171 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.deploy_v1.types import cloud_deploy + +__protobuf__ = proto.module( + package="google.cloud.deploy.v1", + manifest={ + "DeployPolicyEvaluationEvent", + }, +) + + +class DeployPolicyEvaluationEvent(proto.Message): + r"""Payload proto for + "clouddeploy.googleapis.com/deploypolicy_evaluation" Platform Log + event that describes the deploy policy evaluation event. + + Attributes: + message (str): + Debug message for when a deploy policy event + occurs. + rule_type (str): + Rule type (e.g. Restrict Rollouts). + rule (str): + Rule id. + pipeline_uid (str): + Unique identifier of the ``Delivery Pipeline``. + delivery_pipeline (str): + The name of the ``Delivery Pipeline``. + target_uid (str): + Unique identifier of the ``Target``. This is an optional + field, as a ``Target`` may not always be applicable to a + policy. + target (str): + The name of the ``Target``. This is an optional field, as a + ``Target`` may not always be applicable to a policy. + invoker (google.cloud.deploy_v1.types.DeployPolicy.Invoker): + What invoked the action (e.g. a user or + automation). + deploy_policy (str): + The name of the ``DeployPolicy``. + deploy_policy_uid (str): + Unique identifier of the ``DeployPolicy``. + allowed (bool): + Whether the request is allowed. Allowed is + set as true if: (1) the request complies with + the policy; or (2) the request doesn't comply + with the policy but the policy was overridden; + or + (3) the request doesn't comply with the policy + but the policy was suspended + verdict (google.cloud.deploy_v1.types.DeployPolicyEvaluationEvent.PolicyVerdict): + The policy verdict of the request. + overrides (MutableSequence[google.cloud.deploy_v1.types.DeployPolicyEvaluationEvent.PolicyVerdictOverride]): + Things that could have overridden the policy + verdict. 
Overrides together with verdict decide + whether the request is allowed. + """ + + class PolicyVerdict(proto.Enum): + r"""The policy verdict of the request. + + Values: + POLICY_VERDICT_UNSPECIFIED (0): + This should never happen. + ALLOWED_BY_POLICY (1): + Allowed by policy. This enum value is not + currently used but may be used in the future. + Currently logs are only generated when a request + is denied by policy. + DENIED_BY_POLICY (2): + Denied by policy. + """ + POLICY_VERDICT_UNSPECIFIED = 0 + ALLOWED_BY_POLICY = 1 + DENIED_BY_POLICY = 2 + + class PolicyVerdictOverride(proto.Enum): + r"""Things that could have overridden the policy verdict. When overrides + are used, the request will be allowed even if it is + DENIED_BY_POLICY. + + Values: + POLICY_VERDICT_OVERRIDE_UNSPECIFIED (0): + This should never happen. + POLICY_OVERRIDDEN (1): + The policy was overridden. + POLICY_SUSPENDED (2): + The policy was suspended. + """ + POLICY_VERDICT_OVERRIDE_UNSPECIFIED = 0 + POLICY_OVERRIDDEN = 1 + POLICY_SUSPENDED = 2 + + message: str = proto.Field( + proto.STRING, + number=1, + ) + rule_type: str = proto.Field( + proto.STRING, + number=2, + ) + rule: str = proto.Field( + proto.STRING, + number=3, + ) + pipeline_uid: str = proto.Field( + proto.STRING, + number=4, + ) + delivery_pipeline: str = proto.Field( + proto.STRING, + number=5, + ) + target_uid: str = proto.Field( + proto.STRING, + number=6, + ) + target: str = proto.Field( + proto.STRING, + number=7, + ) + invoker: cloud_deploy.DeployPolicy.Invoker = proto.Field( + proto.ENUM, + number=8, + enum=cloud_deploy.DeployPolicy.Invoker, + ) + deploy_policy: str = proto.Field( + proto.STRING, + number=9, + ) + deploy_policy_uid: str = proto.Field( + proto.STRING, + number=10, + ) + allowed: bool = proto.Field( + proto.BOOL, + number=11, + ) + verdict: PolicyVerdict = proto.Field( + proto.ENUM, + number=12, + enum=PolicyVerdict, + ) + overrides: MutableSequence[PolicyVerdictOverride] = proto.RepeatedField( + proto.ENUM, 
+ number=13, + enum=PolicyVerdictOverride, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/deploypolicy_notification_payload.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/deploypolicy_notification_payload.py new file mode 100644 index 000000000000..71a0de5d66d3 --- /dev/null +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/deploypolicy_notification_payload.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.deploy_v1.types import log_enums + +__protobuf__ = proto.module( + package="google.cloud.deploy.v1", + manifest={ + "DeployPolicyNotificationEvent", + }, +) + + +class DeployPolicyNotificationEvent(proto.Message): + r"""Payload proto for + "clouddeploy.googleapis.com/deploypolicy_notification". Platform Log + event that describes the failure to send a pub/sub notification when + there is a DeployPolicy status change. + + Attributes: + message (str): + Debug message for when a deploy policy fails + to send a pub/sub notification. + deploy_policy (str): + The name of the ``DeployPolicy``. + deploy_policy_uid (str): + Unique identifier of the deploy policy. + type_ (google.cloud.deploy_v1.types.Type): + Type of this notification, e.g. 
for a Pub/Sub + failure. + """ + + message: str = proto.Field( + proto.STRING, + number=1, + ) + deploy_policy: str = proto.Field( + proto.STRING, + number=2, + ) + deploy_policy_uid: str = proto.Field( + proto.STRING, + number=3, + ) + type_: log_enums.Type = proto.Field( + proto.ENUM, + number=4, + enum=log_enums.Type, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_async.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_async.py new file mode 100644 index 000000000000..1d0ed0d50f40 --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDeployPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_CreateDeployPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +async def sample_create_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + deploy_policy = deploy_v1.DeployPolicy() + deploy_policy.rules.restrict_rollouts.time_window.time_zone = "time_zone_value" + + request = deploy_v1.CreateDeployPolicyRequest( + parent="parent_value", + deploy_policy_id="deploy_policy_id_value", + deploy_policy=deploy_policy, + ) + + # Make the request + operation = client.create_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_CreateDeployPolicy_async] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_sync.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_sync.py new file mode 100644 index 000000000000..79b708a86bc0 --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDeployPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_CreateDeployPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +def sample_create_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + deploy_policy = deploy_v1.DeployPolicy() + deploy_policy.rules.restrict_rollouts.time_window.time_zone = "time_zone_value" + + request = deploy_v1.CreateDeployPolicyRequest( + parent="parent_value", + deploy_policy_id="deploy_policy_id_value", + deploy_policy=deploy_policy, + ) + + # Make the request + operation = client.create_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_CreateDeployPolicy_sync] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_async.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_async.py new file mode 100644 index 000000000000..6f6545a0350c --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDeployPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_DeleteDeployPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +async def sample_delete_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + request = deploy_v1.DeleteDeployPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_DeleteDeployPolicy_async] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_sync.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_sync.py new file mode 100644 index 000000000000..1c9d7dd414fe --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDeployPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_DeleteDeployPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +def sample_delete_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + request = deploy_v1.DeleteDeployPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_deploy_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_DeleteDeployPolicy_sync] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_async.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_async.py new file mode 100644 index 000000000000..a96e7a4309c2 --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDeployPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_GetDeployPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +async def sample_get_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + request = deploy_v1.GetDeployPolicyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_deploy_policy(request=request) + + # Handle the response + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_GetDeployPolicy_async] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_sync.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_sync.py new file mode 100644 index 000000000000..669a50729182 --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDeployPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_GetDeployPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +def sample_get_deploy_policy(): + # Create a client + client = deploy_v1.CloudDeployClient() + + # Initialize request argument(s) + request = deploy_v1.GetDeployPolicyRequest( + name="name_value", + ) + + # Make the request + response = client.get_deploy_policy(request=request) + + # Handle the response + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_GetDeployPolicy_sync] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_async.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_async.py new file mode 100644 index 000000000000..f3932c8119a1 --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDeployPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-deploy + + +# [START clouddeploy_v1_generated_CloudDeploy_ListDeployPolicies_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import deploy_v1 + + +async def sample_list_deploy_policies(): + # Create a client + client = deploy_v1.CloudDeployAsyncClient() + + # Initialize request argument(s) + request = deploy_v1.ListDeployPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deploy_policies(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END clouddeploy_v1_generated_CloudDeploy_ListDeployPolicies_async] diff --git a/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_sync.py b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_sync.py new file mode 100644 index 000000000000..8c6baf6b8c95 --- /dev/null +++ b/packages/google-cloud-deploy/samples/generated_samples/clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
# [START clouddeploy_v1_generated_CloudDeploy_ListDeployPolicies_sync]
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
#   client as shown in:
#   https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import deploy_v1


def sample_list_deploy_policies():
    """Page through all DeployPolicy resources under a parent and print each one."""
    # Instantiate the synchronous Cloud Deploy client.
    deploy_client = deploy_v1.CloudDeployClient()

    # Build the list request; "parent_value" is a placeholder resource name.
    list_request = deploy_v1.ListDeployPoliciesRequest(
        parent="parent_value",
    )

    # The returned pager transparently fetches additional pages as we iterate.
    for policy in deploy_client.list_deploy_policies(request=list_request):
        print(policy)

# [END clouddeploy_v1_generated_CloudDeploy_ListDeployPolicies_sync]
# [START clouddeploy_v1_generated_CloudDeploy_UpdateDeployPolicy_async]
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
#   client as shown in:
#   https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import deploy_v1


async def sample_update_deploy_policy():
    """Update a DeployPolicy via the async client and print the LRO result."""
    # Instantiate the asynchronous Cloud Deploy client.
    deploy_client = deploy_v1.CloudDeployAsyncClient()

    # Populate the policy to update; "time_zone_value" is a placeholder.
    policy = deploy_v1.DeployPolicy()
    policy.rules.restrict_rollouts.time_window.time_zone = "time_zone_value"

    update_request = deploy_v1.UpdateDeployPolicyRequest(
        deploy_policy=policy,
    )

    # Start the long-running operation (the call itself is a coroutine).
    operation = deploy_client.update_deploy_policy(request=update_request)

    print("Waiting for operation to complete...")

    # Awaiting the coroutine yields the AsyncOperation; block on its result.
    lro = await operation
    response = lro.result()

    # Handle the response
    print(response)

# [END clouddeploy_v1_generated_CloudDeploy_UpdateDeployPolicy_async]
# [START clouddeploy_v1_generated_CloudDeploy_UpdateDeployPolicy_sync]
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
#   client as shown in:
#   https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import deploy_v1


def sample_update_deploy_policy():
    """Update a DeployPolicy synchronously and print the completed operation's result."""
    # Instantiate the synchronous Cloud Deploy client.
    deploy_client = deploy_v1.CloudDeployClient()

    # Populate the policy to update; "time_zone_value" is a placeholder.
    policy = deploy_v1.DeployPolicy()
    policy.rules.restrict_rollouts.time_window.time_zone = "time_zone_value"

    update_request = deploy_v1.UpdateDeployPolicyRequest(
        deploy_policy=policy,
    )

    # Kick off the long-running operation.
    operation = deploy_client.update_deploy_policy(request=update_request)

    print("Waiting for operation to complete...")

    # Block until the operation finishes and retrieve its result.
    response = operation.result()

    # Handle the response
    print(response)

# [END clouddeploy_v1_generated_CloudDeploy_UpdateDeployPolicy_sync]
"CloudDeploy" + }, + "shortName": "CreateDeployPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.CreateDeployPolicyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "deploy_policy", + "type": "google.cloud.deploy_v1.types.DeployPolicy" + }, + { + "name": "deploy_policy_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_deploy_policy" + }, + "description": "Sample for CreateDeployPolicy", + "file": "clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_CreateDeployPolicy_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployClient", + "shortName": "CloudDeployClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployClient.create_deploy_policy", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.CreateDeployPolicy", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "CreateDeployPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.CreateDeployPolicyRequest" + }, + 
{ + "name": "parent", + "type": "str" + }, + { + "name": "deploy_policy", + "type": "google.cloud.deploy_v1.types.DeployPolicy" + }, + { + "name": "deploy_policy_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_deploy_policy" + }, + "description": "Sample for CreateDeployPolicy", + "file": "clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_CreateDeployPolicy_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_create_deploy_policy_sync.py" + }, { "canonical": true, "clientMethod": { @@ -2377,19 +2554,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", "shortName": "CloudDeployAsyncClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.delete_target", + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.delete_deploy_policy", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.DeleteTarget", + "fullName": "google.cloud.deploy.v1.CloudDeploy.DeleteDeployPolicy", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "DeleteTarget" + "shortName": "DeleteDeployPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.DeleteTargetRequest" + "type": 
"google.cloud.deploy_v1.types.DeleteDeployPolicyRequest" }, { "name": "name", @@ -2409,13 +2586,13 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_target" + "shortName": "delete_deploy_policy" }, - "description": "Sample for DeleteTarget", - "file": "clouddeploy_v1_generated_cloud_deploy_delete_target_async.py", + "description": "Sample for DeleteDeployPolicy", + "file": "clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_DeleteTarget_async", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_DeleteDeployPolicy_async", "segments": [ { "end": 55, @@ -2448,7 +2625,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_delete_target_async.py" + "title": "clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_async.py" }, { "canonical": true, @@ -2457,19 +2634,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployClient", "shortName": "CloudDeployClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployClient.delete_target", + "fullName": "google.cloud.deploy_v1.CloudDeployClient.delete_deploy_policy", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.DeleteTarget", + "fullName": "google.cloud.deploy.v1.CloudDeploy.DeleteDeployPolicy", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "DeleteTarget" + "shortName": "DeleteDeployPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.DeleteTargetRequest" + "type": "google.cloud.deploy_v1.types.DeleteDeployPolicyRequest" }, { "name": "name", @@ -2489,13 +2666,13 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "delete_target" + "shortName": "delete_deploy_policy" }, - "description": "Sample for DeleteTarget", - "file": "clouddeploy_v1_generated_cloud_deploy_delete_target_sync.py", + 
"description": "Sample for DeleteDeployPolicy", + "file": "clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_DeleteTarget_sync", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_DeleteDeployPolicy_sync", "segments": [ { "end": 55, @@ -2528,7 +2705,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_delete_target_sync.py" + "title": "clouddeploy_v1_generated_cloud_deploy_delete_deploy_policy_sync.py" }, { "canonical": true, @@ -2538,19 +2715,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", "shortName": "CloudDeployAsyncClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.get_automation_run", + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.delete_target", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.GetAutomationRun", + "fullName": "google.cloud.deploy.v1.CloudDeploy.DeleteTarget", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "GetAutomationRun" + "shortName": "DeleteTarget" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.GetAutomationRunRequest" + "type": "google.cloud.deploy_v1.types.DeleteTargetRequest" }, { "name": "name", @@ -2569,22 +2746,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.types.AutomationRun", - "shortName": "get_automation_run" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_target" }, - "description": "Sample for GetAutomationRun", - "file": "clouddeploy_v1_generated_cloud_deploy_get_automation_run_async.py", + "description": "Sample for DeleteTarget", + "file": "clouddeploy_v1_generated_cloud_deploy_delete_target_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_GetAutomationRun_async", 
+ "regionTag": "clouddeploy_v1_generated_CloudDeploy_DeleteTarget_async", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -2599,17 +2776,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_get_automation_run_async.py" + "title": "clouddeploy_v1_generated_cloud_deploy_delete_target_async.py" }, { "canonical": true, @@ -2618,19 +2795,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployClient", "shortName": "CloudDeployClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployClient.get_automation_run", + "fullName": "google.cloud.deploy_v1.CloudDeployClient.delete_target", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.GetAutomationRun", + "fullName": "google.cloud.deploy.v1.CloudDeploy.DeleteTarget", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "GetAutomationRun" + "shortName": "DeleteTarget" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.GetAutomationRunRequest" + "type": "google.cloud.deploy_v1.types.DeleteTargetRequest" }, { "name": "name", @@ -2649,22 +2826,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.types.AutomationRun", - "shortName": "get_automation_run" + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_target" }, - "description": "Sample for GetAutomationRun", - "file": "clouddeploy_v1_generated_cloud_deploy_get_automation_run_sync.py", + "description": "Sample for DeleteTarget", + "file": "clouddeploy_v1_generated_cloud_deploy_delete_target_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_GetAutomationRun_sync", + "regionTag": 
"clouddeploy_v1_generated_CloudDeploy_DeleteTarget_sync", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -2679,17 +2856,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_get_automation_run_sync.py" + "title": "clouddeploy_v1_generated_cloud_deploy_delete_target_sync.py" }, { "canonical": true, @@ -2699,19 +2876,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", "shortName": "CloudDeployAsyncClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.get_automation", + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.get_automation_run", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.GetAutomation", + "fullName": "google.cloud.deploy.v1.CloudDeploy.GetAutomationRun", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "GetAutomation" + "shortName": "GetAutomationRun" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.GetAutomationRequest" + "type": "google.cloud.deploy_v1.types.GetAutomationRunRequest" }, { "name": "name", @@ -2730,14 +2907,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.types.Automation", - "shortName": "get_automation" + "resultType": "google.cloud.deploy_v1.types.AutomationRun", + "shortName": "get_automation_run" }, - "description": "Sample for GetAutomation", - "file": "clouddeploy_v1_generated_cloud_deploy_get_automation_async.py", + "description": "Sample for GetAutomationRun", + "file": "clouddeploy_v1_generated_cloud_deploy_get_automation_run_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_GetAutomation_async", + 
"regionTag": "clouddeploy_v1_generated_CloudDeploy_GetAutomationRun_async", "segments": [ { "end": 51, @@ -2770,7 +2947,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_get_automation_async.py" + "title": "clouddeploy_v1_generated_cloud_deploy_get_automation_run_async.py" }, { "canonical": true, @@ -2779,14 +2956,175 @@ "fullName": "google.cloud.deploy_v1.CloudDeployClient", "shortName": "CloudDeployClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployClient.get_automation", + "fullName": "google.cloud.deploy_v1.CloudDeployClient.get_automation_run", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.GetAutomation", + "fullName": "google.cloud.deploy.v1.CloudDeploy.GetAutomationRun", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "GetAutomation" + "shortName": "GetAutomationRun" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.GetAutomationRunRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.deploy_v1.types.AutomationRun", + "shortName": "get_automation_run" + }, + "description": "Sample for GetAutomationRun", + "file": "clouddeploy_v1_generated_cloud_deploy_get_automation_run_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_GetAutomationRun_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_get_automation_run_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", + "shortName": "CloudDeployAsyncClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.get_automation", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.GetAutomation", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "GetAutomation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.GetAutomationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.deploy_v1.types.Automation", + "shortName": "get_automation" + }, + "description": "Sample for GetAutomation", + "file": "clouddeploy_v1_generated_cloud_deploy_get_automation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_GetAutomation_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_get_automation_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployClient", + "shortName": "CloudDeployClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployClient.get_automation", + 
"method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.GetAutomation", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "GetAutomation" }, "parameters": [ { @@ -3335,6 +3673,167 @@ ], "title": "clouddeploy_v1_generated_cloud_deploy_get_delivery_pipeline_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", + "shortName": "CloudDeployAsyncClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.get_deploy_policy", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.GetDeployPolicy", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "GetDeployPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.GetDeployPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.deploy_v1.types.DeployPolicy", + "shortName": "get_deploy_policy" + }, + "description": "Sample for GetDeployPolicy", + "file": "clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_GetDeployPolicy_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployClient", + "shortName": "CloudDeployClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployClient.get_deploy_policy", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.GetDeployPolicy", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "GetDeployPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.GetDeployPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.deploy_v1.types.DeployPolicy", + "shortName": "get_deploy_policy" + }, + "description": "Sample for GetDeployPolicy", + "file": "clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_GetDeployPolicy_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_get_deploy_policy_sync.py" + }, { "canonical": true, "clientMethod": { @@ -4278,11 +4777,172 @@ "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListAutomationRunsPager", "shortName": "list_automation_runs" }, - "description": "Sample for ListAutomationRuns", - "file": 
"clouddeploy_v1_generated_cloud_deploy_list_automation_runs_sync.py", + "description": "Sample for ListAutomationRuns", + "file": "clouddeploy_v1_generated_cloud_deploy_list_automation_runs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListAutomationRuns_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_list_automation_runs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", + "shortName": "CloudDeployAsyncClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.list_automations", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.ListAutomations", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "ListAutomations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.ListAutomationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListAutomationsAsyncPager", + "shortName": "list_automations" + }, + "description": "Sample for ListAutomations", + "file": "clouddeploy_v1_generated_cloud_deploy_list_automations_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"clouddeploy_v1_generated_CloudDeploy_ListAutomations_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_list_automations_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployClient", + "shortName": "CloudDeployClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployClient.list_automations", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.ListAutomations", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "ListAutomations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.ListAutomationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListAutomationsPager", + "shortName": "list_automations" + }, + "description": "Sample for ListAutomations", + "file": "clouddeploy_v1_generated_cloud_deploy_list_automations_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListAutomationRuns_sync", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListAutomations_sync", "segments": [ { "end": 52, @@ -4315,7 +4975,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_list_automation_runs_sync.py" + "title": 
"clouddeploy_v1_generated_cloud_deploy_list_automations_sync.py" }, { "canonical": true, @@ -4325,19 +4985,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", "shortName": "CloudDeployAsyncClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.list_automations", + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.list_custom_target_types", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.ListAutomations", + "fullName": "google.cloud.deploy.v1.CloudDeploy.ListCustomTargetTypes", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "ListAutomations" + "shortName": "ListCustomTargetTypes" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.ListAutomationsRequest" + "type": "google.cloud.deploy_v1.types.ListCustomTargetTypesRequest" }, { "name": "parent", @@ -4356,14 +5016,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListAutomationsAsyncPager", - "shortName": "list_automations" + "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListCustomTargetTypesAsyncPager", + "shortName": "list_custom_target_types" }, - "description": "Sample for ListAutomations", - "file": "clouddeploy_v1_generated_cloud_deploy_list_automations_async.py", + "description": "Sample for ListCustomTargetTypes", + "file": "clouddeploy_v1_generated_cloud_deploy_list_custom_target_types_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListAutomations_async", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListCustomTargetTypes_async", "segments": [ { "end": 52, @@ -4396,7 +5056,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_list_automations_async.py" + "title": "clouddeploy_v1_generated_cloud_deploy_list_custom_target_types_async.py" }, { "canonical": true, @@ -4405,19 
+5065,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployClient", "shortName": "CloudDeployClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployClient.list_automations", + "fullName": "google.cloud.deploy_v1.CloudDeployClient.list_custom_target_types", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.ListAutomations", + "fullName": "google.cloud.deploy.v1.CloudDeploy.ListCustomTargetTypes", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "ListAutomations" + "shortName": "ListCustomTargetTypes" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.ListAutomationsRequest" + "type": "google.cloud.deploy_v1.types.ListCustomTargetTypesRequest" }, { "name": "parent", @@ -4436,14 +5096,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListAutomationsPager", - "shortName": "list_automations" + "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListCustomTargetTypesPager", + "shortName": "list_custom_target_types" }, - "description": "Sample for ListAutomations", - "file": "clouddeploy_v1_generated_cloud_deploy_list_automations_sync.py", + "description": "Sample for ListCustomTargetTypes", + "file": "clouddeploy_v1_generated_cloud_deploy_list_custom_target_types_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListAutomations_sync", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListCustomTargetTypes_sync", "segments": [ { "end": 52, @@ -4476,7 +5136,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_list_automations_sync.py" + "title": "clouddeploy_v1_generated_cloud_deploy_list_custom_target_types_sync.py" }, { "canonical": true, @@ -4486,19 +5146,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", "shortName": "CloudDeployAsyncClient" }, - "fullName": 
"google.cloud.deploy_v1.CloudDeployAsyncClient.list_custom_target_types", + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.list_delivery_pipelines", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.ListCustomTargetTypes", + "fullName": "google.cloud.deploy.v1.CloudDeploy.ListDeliveryPipelines", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "ListCustomTargetTypes" + "shortName": "ListDeliveryPipelines" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.ListCustomTargetTypesRequest" + "type": "google.cloud.deploy_v1.types.ListDeliveryPipelinesRequest" }, { "name": "parent", @@ -4517,14 +5177,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListCustomTargetTypesAsyncPager", - "shortName": "list_custom_target_types" + "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListDeliveryPipelinesAsyncPager", + "shortName": "list_delivery_pipelines" }, - "description": "Sample for ListCustomTargetTypes", - "file": "clouddeploy_v1_generated_cloud_deploy_list_custom_target_types_async.py", + "description": "Sample for ListDeliveryPipelines", + "file": "clouddeploy_v1_generated_cloud_deploy_list_delivery_pipelines_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListCustomTargetTypes_async", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListDeliveryPipelines_async", "segments": [ { "end": 52, @@ -4557,7 +5217,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_list_custom_target_types_async.py" + "title": "clouddeploy_v1_generated_cloud_deploy_list_delivery_pipelines_async.py" }, { "canonical": true, @@ -4566,19 +5226,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployClient", "shortName": "CloudDeployClient" }, - "fullName": 
"google.cloud.deploy_v1.CloudDeployClient.list_custom_target_types", + "fullName": "google.cloud.deploy_v1.CloudDeployClient.list_delivery_pipelines", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.ListCustomTargetTypes", + "fullName": "google.cloud.deploy.v1.CloudDeploy.ListDeliveryPipelines", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "ListCustomTargetTypes" + "shortName": "ListDeliveryPipelines" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.ListCustomTargetTypesRequest" + "type": "google.cloud.deploy_v1.types.ListDeliveryPipelinesRequest" }, { "name": "parent", @@ -4597,14 +5257,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListCustomTargetTypesPager", - "shortName": "list_custom_target_types" + "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListDeliveryPipelinesPager", + "shortName": "list_delivery_pipelines" }, - "description": "Sample for ListCustomTargetTypes", - "file": "clouddeploy_v1_generated_cloud_deploy_list_custom_target_types_sync.py", + "description": "Sample for ListDeliveryPipelines", + "file": "clouddeploy_v1_generated_cloud_deploy_list_delivery_pipelines_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListCustomTargetTypes_sync", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListDeliveryPipelines_sync", "segments": [ { "end": 52, @@ -4637,7 +5297,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_list_custom_target_types_sync.py" + "title": "clouddeploy_v1_generated_cloud_deploy_list_delivery_pipelines_sync.py" }, { "canonical": true, @@ -4647,19 +5307,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", "shortName": "CloudDeployAsyncClient" }, - "fullName": 
"google.cloud.deploy_v1.CloudDeployAsyncClient.list_delivery_pipelines", + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient.list_deploy_policies", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.ListDeliveryPipelines", + "fullName": "google.cloud.deploy.v1.CloudDeploy.ListDeployPolicies", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "ListDeliveryPipelines" + "shortName": "ListDeployPolicies" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.ListDeliveryPipelinesRequest" + "type": "google.cloud.deploy_v1.types.ListDeployPoliciesRequest" }, { "name": "parent", @@ -4678,14 +5338,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListDeliveryPipelinesAsyncPager", - "shortName": "list_delivery_pipelines" + "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListDeployPoliciesAsyncPager", + "shortName": "list_deploy_policies" }, - "description": "Sample for ListDeliveryPipelines", - "file": "clouddeploy_v1_generated_cloud_deploy_list_delivery_pipelines_async.py", + "description": "Sample for ListDeployPolicies", + "file": "clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListDeliveryPipelines_async", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListDeployPolicies_async", "segments": [ { "end": 52, @@ -4718,7 +5378,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_list_delivery_pipelines_async.py" + "title": "clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_async.py" }, { "canonical": true, @@ -4727,19 +5387,19 @@ "fullName": "google.cloud.deploy_v1.CloudDeployClient", "shortName": "CloudDeployClient" }, - "fullName": "google.cloud.deploy_v1.CloudDeployClient.list_delivery_pipelines", + "fullName": 
"google.cloud.deploy_v1.CloudDeployClient.list_deploy_policies", "method": { - "fullName": "google.cloud.deploy.v1.CloudDeploy.ListDeliveryPipelines", + "fullName": "google.cloud.deploy.v1.CloudDeploy.ListDeployPolicies", "service": { "fullName": "google.cloud.deploy.v1.CloudDeploy", "shortName": "CloudDeploy" }, - "shortName": "ListDeliveryPipelines" + "shortName": "ListDeployPolicies" }, "parameters": [ { "name": "request", - "type": "google.cloud.deploy_v1.types.ListDeliveryPipelinesRequest" + "type": "google.cloud.deploy_v1.types.ListDeployPoliciesRequest" }, { "name": "parent", @@ -4758,14 +5418,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListDeliveryPipelinesPager", - "shortName": "list_delivery_pipelines" + "resultType": "google.cloud.deploy_v1.services.cloud_deploy.pagers.ListDeployPoliciesPager", + "shortName": "list_deploy_policies" }, - "description": "Sample for ListDeliveryPipelines", - "file": "clouddeploy_v1_generated_cloud_deploy_list_delivery_pipelines_sync.py", + "description": "Sample for ListDeployPolicies", + "file": "clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListDeliveryPipelines_sync", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_ListDeployPolicies_sync", "segments": [ { "end": 52, @@ -4798,7 +5458,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "clouddeploy_v1_generated_cloud_deploy_list_delivery_pipelines_sync.py" + "title": "clouddeploy_v1_generated_cloud_deploy_list_deploy_policies_sync.py" }, { "canonical": true, @@ -6466,6 +7126,175 @@ ], "title": "clouddeploy_v1_generated_cloud_deploy_update_delivery_pipeline_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployAsyncClient", + "shortName": "CloudDeployAsyncClient" + }, + "fullName": 
"google.cloud.deploy_v1.CloudDeployAsyncClient.update_deploy_policy", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.UpdateDeployPolicy", + "service": { + "fullName": "google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "UpdateDeployPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.UpdateDeployPolicyRequest" + }, + { + "name": "deploy_policy", + "type": "google.cloud.deploy_v1.types.DeployPolicy" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_deploy_policy" + }, + "description": "Sample for UpdateDeployPolicy", + "file": "clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_UpdateDeployPolicy_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.deploy_v1.CloudDeployClient", + "shortName": "CloudDeployClient" + }, + "fullName": "google.cloud.deploy_v1.CloudDeployClient.update_deploy_policy", + "method": { + "fullName": "google.cloud.deploy.v1.CloudDeploy.UpdateDeployPolicy", + "service": { + "fullName": 
"google.cloud.deploy.v1.CloudDeploy", + "shortName": "CloudDeploy" + }, + "shortName": "UpdateDeployPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.deploy_v1.types.UpdateDeployPolicyRequest" + }, + { + "name": "deploy_policy", + "type": "google.cloud.deploy_v1.types.DeployPolicy" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_deploy_policy" + }, + "description": "Sample for UpdateDeployPolicy", + "file": "clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "clouddeploy_v1_generated_CloudDeploy_UpdateDeployPolicy_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "clouddeploy_v1_generated_cloud_deploy_update_deploy_policy_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-deploy/scripts/fixup_deploy_v1_keywords.py b/packages/google-cloud-deploy/scripts/fixup_deploy_v1_keywords.py index 1a652b4ea5be..10255de70644 100644 --- a/packages/google-cloud-deploy/scripts/fixup_deploy_v1_keywords.py +++ b/packages/google-cloud-deploy/scripts/fixup_deploy_v1_keywords.py @@ -40,44 +40,49 @@ class deployCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 
'abandon_release': ('name', ), - 'advance_rollout': ('name', 'phase_id', ), - 'approve_rollout': ('name', 'approved', ), + 'advance_rollout': ('name', 'phase_id', 'override_deploy_policy', ), + 'approve_rollout': ('name', 'approved', 'override_deploy_policy', ), 'cancel_automation_run': ('name', ), - 'cancel_rollout': ('name', ), + 'cancel_rollout': ('name', 'override_deploy_policy', ), 'create_automation': ('parent', 'automation_id', 'automation', 'request_id', 'validate_only', ), 'create_custom_target_type': ('parent', 'custom_target_type_id', 'custom_target_type', 'request_id', 'validate_only', ), 'create_delivery_pipeline': ('parent', 'delivery_pipeline_id', 'delivery_pipeline', 'request_id', 'validate_only', ), - 'create_release': ('parent', 'release_id', 'release', 'request_id', 'validate_only', ), - 'create_rollout': ('parent', 'rollout_id', 'rollout', 'request_id', 'validate_only', 'starting_phase_id', ), + 'create_deploy_policy': ('parent', 'deploy_policy_id', 'deploy_policy', 'request_id', 'validate_only', ), + 'create_release': ('parent', 'release_id', 'release', 'request_id', 'validate_only', 'override_deploy_policy', ), + 'create_rollout': ('parent', 'rollout_id', 'rollout', 'request_id', 'validate_only', 'override_deploy_policy', 'starting_phase_id', ), 'create_target': ('parent', 'target_id', 'target', 'request_id', 'validate_only', ), 'delete_automation': ('name', 'request_id', 'allow_missing', 'validate_only', 'etag', ), 'delete_custom_target_type': ('name', 'request_id', 'allow_missing', 'validate_only', 'etag', ), 'delete_delivery_pipeline': ('name', 'request_id', 'allow_missing', 'validate_only', 'force', 'etag', ), + 'delete_deploy_policy': ('name', 'request_id', 'allow_missing', 'validate_only', 'etag', ), 'delete_target': ('name', 'request_id', 'allow_missing', 'validate_only', 'etag', ), 'get_automation': ('name', ), 'get_automation_run': ('name', ), 'get_config': ('name', ), 'get_custom_target_type': ('name', ), 'get_delivery_pipeline': 
('name', ), + 'get_deploy_policy': ('name', ), 'get_job_run': ('name', ), 'get_release': ('name', ), 'get_rollout': ('name', ), 'get_target': ('name', ), - 'ignore_job': ('rollout', 'phase_id', 'job_id', ), + 'ignore_job': ('rollout', 'phase_id', 'job_id', 'override_deploy_policy', ), 'list_automation_runs': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_automations': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_custom_target_types': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_delivery_pipelines': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_deploy_policies': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_job_runs': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_releases': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_rollouts': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_targets': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'retry_job': ('rollout', 'phase_id', 'job_id', ), - 'rollback_target': ('name', 'target_id', 'rollout_id', 'release_id', 'rollout_to_roll_back', 'rollback_config', 'validate_only', ), - 'terminate_job_run': ('name', ), + 'retry_job': ('rollout', 'phase_id', 'job_id', 'override_deploy_policy', ), + 'rollback_target': ('name', 'target_id', 'rollout_id', 'release_id', 'rollout_to_roll_back', 'rollback_config', 'validate_only', 'override_deploy_policy', ), + 'terminate_job_run': ('name', 'override_deploy_policy', ), 'update_automation': ('update_mask', 'automation', 'request_id', 'allow_missing', 'validate_only', ), 'update_custom_target_type': ('update_mask', 'custom_target_type', 'request_id', 'allow_missing', 'validate_only', ), 'update_delivery_pipeline': ('update_mask', 'delivery_pipeline', 'request_id', 'allow_missing', 'validate_only', ), + 'update_deploy_policy': ('update_mask', 'deploy_policy', 'request_id', 'allow_missing', 
'validate_only', ), 'update_target': ('update_mask', 'target', 'request_id', 'allow_missing', 'validate_only', ), } diff --git a/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py b/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py index a666cc961336..995ec88305c1 100644 --- a/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py +++ b/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py @@ -52,6 +52,9 @@ from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore +from google.type import date_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers @@ -9689,11 +9692,11 @@ async def test_abandon_release_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ApproveRolloutRequest, + cloud_deploy.CreateDeployPolicyRequest, dict, ], ) -def test_approve_rollout(request_type, transport: str = "grpc"): +def test_create_deploy_policy(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9704,22 +9707,24 @@ def test_approve_rollout(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = cloud_deploy.ApproveRolloutResponse() - response = client.approve_rollout(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.ApproveRolloutRequest() + request = cloud_deploy.CreateDeployPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.ApproveRolloutResponse) + assert isinstance(response, future.Future) -def test_approve_rollout_empty_call(): +def test_create_deploy_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -9728,17 +9733,19 @@ def test_approve_rollout_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.approve_rollout() + client.create_deploy_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ApproveRolloutRequest() + assert args[0] == cloud_deploy.CreateDeployPolicyRequest() -def test_approve_rollout_non_empty_request_with_auto_populated_field(): +def test_create_deploy_policy_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = CloudDeployClient( @@ -9749,24 +9756,30 @@ def test_approve_rollout_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.ApproveRolloutRequest( - name="name_value", + request = cloud_deploy.CreateDeployPolicyRequest( + parent="parent_value", + deploy_policy_id="deploy_policy_id_value", + request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.approve_rollout(request=request) + client.create_deploy_policy(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ApproveRolloutRequest( - name="name_value", + assert args[0] == cloud_deploy.CreateDeployPolicyRequest( + parent="parent_value", + deploy_policy_id="deploy_policy_id_value", + request_id="request_id_value", ) -def test_approve_rollout_use_cached_wrapped_rpc(): +def test_create_deploy_policy_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -9780,21 +9793,29 @@ def test_approve_rollout_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.approve_rollout in client._transport._wrapped_methods + assert ( + client._transport.create_deploy_policy in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # 
operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.approve_rollout] = mock_rpc + client._transport._wrapped_methods[ + client._transport.create_deploy_policy + ] = mock_rpc request = {} - client.approve_rollout(request) + client.create_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.approve_rollout(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9802,7 +9823,7 @@ def test_approve_rollout_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_approve_rollout_empty_call_async(): +async def test_create_deploy_policy_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -9811,19 +9832,21 @@ async def test_approve_rollout_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ApproveRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.approve_rollout() + response = await client.create_deploy_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ApproveRolloutRequest() + assert args[0] == cloud_deploy.CreateDeployPolicyRequest() @pytest.mark.asyncio -async def test_approve_rollout_async_use_cached_wrapped_rpc( +async def test_create_deploy_policy_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -9840,23 +9863,27 @@ async def test_approve_rollout_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.approve_rollout + client._client._transport.create_deploy_policy in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.approve_rollout + client._client._transport.create_deploy_policy ] = mock_object request = {} - await client.approve_rollout(request) + await client.create_deploy_policy(request) # Establish that the underlying gRPC stub method was called. 
assert mock_object.call_count == 1 - await client.approve_rollout(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9864,8 +9891,8 @@ async def test_approve_rollout_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_approve_rollout_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.ApproveRolloutRequest +async def test_create_deploy_policy_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.CreateDeployPolicyRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9877,43 +9904,47 @@ async def test_approve_rollout_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ApproveRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.approve_rollout(request) + response = await client.create_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.ApproveRolloutRequest() + request = cloud_deploy.CreateDeployPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.ApproveRolloutResponse) + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_approve_rollout_async_from_dict(): - await test_approve_rollout_async(request_type=dict) +async def test_create_deploy_policy_async_from_dict(): + await test_create_deploy_policy_async(request_type=dict) -def test_approve_rollout_field_headers(): +def test_create_deploy_policy_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.ApproveRolloutRequest() + request = cloud_deploy.CreateDeployPolicyRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: - call.return_value = cloud_deploy.ApproveRolloutResponse() - client.approve_rollout(request) + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -9924,28 +9955,30 @@ def test_approve_rollout_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_approve_rollout_field_headers_async(): +async def test_create_deploy_policy_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloud_deploy.ApproveRolloutRequest() + request = cloud_deploy.CreateDeployPolicyRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ApproveRolloutResponse() + operations_pb2.Operation(name="operations/op") ) - await client.approve_rollout(request) + await client.create_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -9956,35 +9989,45 @@ async def test_approve_rollout_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_approve_rollout_flattened(): +def test_create_deploy_policy_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ApproveRolloutResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.approve_rollout( - name="name_value", + client.create_deploy_policy( + parent="parent_value", + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + deploy_policy_id="deploy_policy_id_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].deploy_policy + mock_val = cloud_deploy.DeployPolicy(name="name_value") + assert arg == mock_val + arg = args[0].deploy_policy_id + mock_val = "deploy_policy_id_value" assert arg == mock_val -def test_approve_rollout_flattened_error(): +def test_create_deploy_policy_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -9992,43 +10035,55 @@ def test_approve_rollout_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.approve_rollout( - cloud_deploy.ApproveRolloutRequest(), - name="name_value", + client.create_deploy_policy( + cloud_deploy.CreateDeployPolicyRequest(), + parent="parent_value", + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + deploy_policy_id="deploy_policy_id_value", ) @pytest.mark.asyncio -async def test_approve_rollout_flattened_async(): +async def test_create_deploy_policy_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.create_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ApproveRolloutResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ApproveRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.approve_rollout( - name="name_value", + response = await client.create_deploy_policy( + parent="parent_value", + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + deploy_policy_id="deploy_policy_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].deploy_policy + mock_val = cloud_deploy.DeployPolicy(name="name_value") + assert arg == mock_val + arg = args[0].deploy_policy_id + mock_val = "deploy_policy_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_approve_rollout_flattened_error_async(): +async def test_create_deploy_policy_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10036,20 +10091,22 @@ async def test_approve_rollout_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.approve_rollout( - cloud_deploy.ApproveRolloutRequest(), - name="name_value", + await client.create_deploy_policy( + cloud_deploy.CreateDeployPolicyRequest(), + parent="parent_value", + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + deploy_policy_id="deploy_policy_id_value", ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.AdvanceRolloutRequest, + cloud_deploy.UpdateDeployPolicyRequest, dict, ], ) -def test_advance_rollout(request_type, transport: str = "grpc"): +def test_update_deploy_policy(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10060,22 +10117,24 @@ def test_advance_rollout(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.AdvanceRolloutResponse() - response = client.advance_rollout(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.AdvanceRolloutRequest() + request = cloud_deploy.UpdateDeployPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.AdvanceRolloutResponse) + assert isinstance(response, future.Future) -def test_advance_rollout_empty_call(): +def test_update_deploy_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -10084,17 +10143,19 @@ def test_advance_rollout_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.advance_rollout() + client.update_deploy_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.AdvanceRolloutRequest() + assert args[0] == cloud_deploy.UpdateDeployPolicyRequest() -def test_advance_rollout_non_empty_request_with_auto_populated_field(): +def test_update_deploy_policy_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -10105,26 +10166,26 @@ def test_advance_rollout_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.AdvanceRolloutRequest( - name="name_value", - phase_id="phase_id_value", + request = cloud_deploy.UpdateDeployPolicyRequest( + request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.advance_rollout(request=request) + client.update_deploy_policy(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.AdvanceRolloutRequest( - name="name_value", - phase_id="phase_id_value", + assert args[0] == cloud_deploy.UpdateDeployPolicyRequest( + request_id="request_id_value", ) -def test_advance_rollout_use_cached_wrapped_rpc(): +def test_update_deploy_policy_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10138,21 +10199,29 @@ def test_advance_rollout_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.advance_rollout in client._transport._wrapped_methods + assert ( + client._transport.update_deploy_policy in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.advance_rollout] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_deploy_policy + ] = mock_rpc request = {} - client.advance_rollout(request) + client.update_deploy_policy(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.advance_rollout(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10160,7 +10229,7 @@ def test_advance_rollout_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_advance_rollout_empty_call_async(): +async def test_update_deploy_policy_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -10169,19 +10238,21 @@ async def test_advance_rollout_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.AdvanceRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.advance_rollout() + response = await client.update_deploy_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.AdvanceRolloutRequest() + assert args[0] == cloud_deploy.UpdateDeployPolicyRequest() @pytest.mark.asyncio -async def test_advance_rollout_async_use_cached_wrapped_rpc( +async def test_update_deploy_policy_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10198,23 +10269,27 @@ async def test_advance_rollout_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.advance_rollout + client._client._transport.update_deploy_policy in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.advance_rollout + client._client._transport.update_deploy_policy ] = mock_object request = {} - await client.advance_rollout(request) + await client.update_deploy_policy(request) # Establish that the underlying gRPC stub method was called. 
assert mock_object.call_count == 1 - await client.advance_rollout(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10222,8 +10297,8 @@ async def test_advance_rollout_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_advance_rollout_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.AdvanceRolloutRequest +async def test_update_deploy_policy_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.UpdateDeployPolicyRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10235,43 +10310,47 @@ async def test_advance_rollout_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.AdvanceRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.advance_rollout(request) + response = await client.update_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.AdvanceRolloutRequest() + request = cloud_deploy.UpdateDeployPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.AdvanceRolloutResponse) + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_advance_rollout_async_from_dict(): - await test_advance_rollout_async(request_type=dict) +async def test_update_deploy_policy_async_from_dict(): + await test_update_deploy_policy_async(request_type=dict) -def test_advance_rollout_field_headers(): +def test_update_deploy_policy_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.AdvanceRolloutRequest() + request = cloud_deploy.UpdateDeployPolicyRequest() - request.name = "name_value" + request.deploy_policy.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: - call.return_value = cloud_deploy.AdvanceRolloutResponse() - client.advance_rollout(request) + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -10282,28 +10361,30 @@ def test_advance_rollout_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "deploy_policy.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_advance_rollout_field_headers_async(): +async def test_update_deploy_policy_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloud_deploy.AdvanceRolloutRequest() + request = cloud_deploy.UpdateDeployPolicyRequest() - request.name = "name_value" + request.deploy_policy.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.AdvanceRolloutResponse() + operations_pb2.Operation(name="operations/op") ) - await client.advance_rollout(request) + await client.update_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -10314,39 +10395,41 @@ async def test_advance_rollout_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "deploy_policy.name=name_value", ) in kw["metadata"] -def test_advance_rollout_flattened(): +def test_update_deploy_policy_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.AdvanceRolloutResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.advance_rollout( - name="name_value", - phase_id="phase_id_value", + client.update_deploy_policy( + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].deploy_policy + mock_val = cloud_deploy.DeployPolicy(name="name_value") assert arg == mock_val - arg = args[0].phase_id - mock_val = "phase_id_value" + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_advance_rollout_flattened_error(): +def test_update_deploy_policy_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10354,48 +10437,50 @@ def test_advance_rollout_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.advance_rollout( - cloud_deploy.AdvanceRolloutRequest(), - name="name_value", - phase_id="phase_id_value", + client.update_deploy_policy( + cloud_deploy.UpdateDeployPolicyRequest(), + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_advance_rollout_flattened_async(): +async def test_update_deploy_policy_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.update_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.AdvanceRolloutResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.AdvanceRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.advance_rollout( - name="name_value", - phase_id="phase_id_value", + response = await client.update_deploy_policy( + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].deploy_policy + mock_val = cloud_deploy.DeployPolicy(name="name_value") assert arg == mock_val - arg = args[0].phase_id - mock_val = "phase_id_value" + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_advance_rollout_flattened_error_async(): +async def test_update_deploy_policy_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10403,21 +10488,21 @@ async def test_advance_rollout_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.advance_rollout( - cloud_deploy.AdvanceRolloutRequest(), - name="name_value", - phase_id="phase_id_value", + await client.update_deploy_policy( + cloud_deploy.UpdateDeployPolicyRequest(), + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.CancelRolloutRequest, + cloud_deploy.DeleteDeployPolicyRequest, dict, ], ) -def test_cancel_rollout(request_type, transport: str = "grpc"): +def test_delete_deploy_policy(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10428,22 +10513,24 @@ def test_cancel_rollout(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.CancelRolloutResponse() - response = client.cancel_rollout(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.CancelRolloutRequest() + request = cloud_deploy.DeleteDeployPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.CancelRolloutResponse) + assert isinstance(response, future.Future) -def test_cancel_rollout_empty_call(): +def test_delete_deploy_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -10452,17 +10539,19 @@ def test_cancel_rollout_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.cancel_rollout() + client.delete_deploy_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CancelRolloutRequest() + assert args[0] == cloud_deploy.DeleteDeployPolicyRequest() -def test_cancel_rollout_non_empty_request_with_auto_populated_field(): +def test_delete_deploy_policy_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -10473,24 +10562,30 @@ def test_cancel_rollout_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.CancelRolloutRequest( + request = cloud_deploy.DeleteDeployPolicyRequest( name="name_value", + request_id="request_id_value", + etag="etag_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.cancel_rollout(request=request) + client.delete_deploy_policy(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CancelRolloutRequest( + assert args[0] == cloud_deploy.DeleteDeployPolicyRequest( name="name_value", + request_id="request_id_value", + etag="etag_value", ) -def test_cancel_rollout_use_cached_wrapped_rpc(): +def test_delete_deploy_policy_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10504,21 +10599,29 @@ def test_cancel_rollout_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.cancel_rollout in client._transport._wrapped_methods + assert ( + client._transport.delete_deploy_policy in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.cancel_rollout] = mock_rpc + client._transport._wrapped_methods[ + client._transport.delete_deploy_policy + ] = mock_rpc request = {} - client.cancel_rollout(request) + client.delete_deploy_policy(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.cancel_rollout(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10526,7 +10629,7 @@ def test_cancel_rollout_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_cancel_rollout_empty_call_async(): +async def test_delete_deploy_policy_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -10535,19 +10638,21 @@ async def test_cancel_rollout_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.CancelRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.cancel_rollout() + response = await client.delete_deploy_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CancelRolloutRequest() + assert args[0] == cloud_deploy.DeleteDeployPolicyRequest() @pytest.mark.asyncio -async def test_cancel_rollout_async_use_cached_wrapped_rpc( +async def test_delete_deploy_policy_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10564,23 +10669,27 @@ async def test_cancel_rollout_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.cancel_rollout + client._client._transport.delete_deploy_policy in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.cancel_rollout + client._client._transport.delete_deploy_policy ] = mock_object request = {} - await client.cancel_rollout(request) + await client.delete_deploy_policy(request) # Establish that the underlying gRPC stub method was called. 
assert mock_object.call_count == 1 - await client.cancel_rollout(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10588,8 +10697,8 @@ async def test_cancel_rollout_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_cancel_rollout_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.CancelRolloutRequest +async def test_delete_deploy_policy_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.DeleteDeployPolicyRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10601,43 +10710,47 @@ async def test_cancel_rollout_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.CancelRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.cancel_rollout(request) + response = await client.delete_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.CancelRolloutRequest() + request = cloud_deploy.DeleteDeployPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.CancelRolloutResponse) + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_cancel_rollout_async_from_dict(): - await test_cancel_rollout_async(request_type=dict) +async def test_delete_deploy_policy_async_from_dict(): + await test_delete_deploy_policy_async(request_type=dict) -def test_cancel_rollout_field_headers(): +def test_delete_deploy_policy_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.CancelRolloutRequest() + request = cloud_deploy.DeleteDeployPolicyRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: - call.return_value = cloud_deploy.CancelRolloutResponse() - client.cancel_rollout(request) + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -10653,23 +10766,25 @@ def test_cancel_rollout_field_headers(): @pytest.mark.asyncio -async def test_cancel_rollout_field_headers_async(): +async def test_delete_deploy_policy_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.CancelRolloutRequest() + request = cloud_deploy.DeleteDeployPolicyRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.CancelRolloutResponse() + operations_pb2.Operation(name="operations/op") ) - await client.cancel_rollout(request) + await client.delete_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -10684,18 +10799,20 @@ async def test_cancel_rollout_field_headers_async(): ) in kw["metadata"] -def test_cancel_rollout_flattened(): +def test_delete_deploy_policy_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.CancelRolloutResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.cancel_rollout( + client.delete_deploy_policy( name="name_value", ) @@ -10708,7 +10825,7 @@ def test_cancel_rollout_flattened(): assert arg == mock_val -def test_cancel_rollout_flattened_error(): +def test_delete_deploy_policy_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10716,29 +10833,31 @@ def test_cancel_rollout_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.cancel_rollout( - cloud_deploy.CancelRolloutRequest(), + client.delete_deploy_policy( + cloud_deploy.DeleteDeployPolicyRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_cancel_rollout_flattened_async(): +async def test_delete_deploy_policy_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.CancelRolloutResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.CancelRolloutResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.cancel_rollout( + response = await client.delete_deploy_policy( name="name_value", ) @@ -10752,7 +10871,7 @@ async def test_cancel_rollout_flattened_async(): @pytest.mark.asyncio -async def test_cancel_rollout_flattened_error_async(): +async def test_delete_deploy_policy_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10760,8 +10879,8 @@ async def test_cancel_rollout_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.cancel_rollout( - cloud_deploy.CancelRolloutRequest(), + await client.delete_deploy_policy( + cloud_deploy.DeleteDeployPolicyRequest(), name="name_value", ) @@ -10769,11 +10888,11 @@ async def test_cancel_rollout_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ListRolloutsRequest, + cloud_deploy.ListDeployPoliciesRequest, dict, ], ) -def test_list_rollouts(request_type, transport: str = "grpc"): +def test_list_deploy_policies(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10784,27 +10903,29 @@ def test_list_rollouts(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListRolloutsResponse( + call.return_value = cloud_deploy.ListDeployPoliciesResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) - response = client.list_rollouts(request) + response = client.list_deploy_policies(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.ListRolloutsRequest() + request = cloud_deploy.ListDeployPoliciesRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListRolloutsPager) + assert isinstance(response, pagers.ListDeployPoliciesPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] -def test_list_rollouts_empty_call(): +def test_list_deploy_policies_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -10813,17 +10934,19 @@ def test_list_rollouts_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_rollouts() + client.list_deploy_policies() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListRolloutsRequest() + assert args[0] == cloud_deploy.ListDeployPoliciesRequest() -def test_list_rollouts_non_empty_request_with_auto_populated_field(): +def test_list_deploy_policies_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -10834,7 +10957,7 @@ def test_list_rollouts_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = cloud_deploy.ListRolloutsRequest( + request = cloud_deploy.ListDeployPoliciesRequest( parent="parent_value", page_token="page_token_value", filter="filter_value", @@ -10842,14 +10965,16 @@ def test_list_rollouts_non_empty_request_with_auto_populated_field(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_rollouts(request=request) + client.list_deploy_policies(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListRolloutsRequest( + assert args[0] == cloud_deploy.ListDeployPoliciesRequest( parent="parent_value", page_token="page_token_value", filter="filter_value", @@ -10857,7 +10982,7 @@ def test_list_rollouts_non_empty_request_with_auto_populated_field(): ) -def test_list_rollouts_use_cached_wrapped_rpc(): +def test_list_deploy_policies_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10871,21 +10996,25 @@ def test_list_rollouts_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_rollouts in client._transport._wrapped_methods + assert ( + client._transport.list_deploy_policies in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.list_rollouts] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_deploy_policies + ] = mock_rpc request = {} - client.list_rollouts(request) + client.list_deploy_policies(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_rollouts(request) + client.list_deploy_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10893,7 +11022,7 @@ def test_list_rollouts_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_rollouts_empty_call_async(): +async def test_list_deploy_policies_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -10902,22 +11031,24 @@ async def test_list_rollouts_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListRolloutsResponse( + cloud_deploy.ListDeployPoliciesResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) ) - response = await client.list_rollouts() + response = await client.list_deploy_policies() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListRolloutsRequest() + assert args[0] == cloud_deploy.ListDeployPoliciesRequest() @pytest.mark.asyncio -async def test_list_rollouts_async_use_cached_wrapped_rpc( +async def test_list_deploy_policies_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10934,23 +11065,23 @@ async def test_list_rollouts_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_rollouts + client._client._transport.list_deploy_policies in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.list_rollouts + client._client._transport.list_deploy_policies ] = mock_object request = {} - await client.list_rollouts(request) + await client.list_deploy_policies(request) # Establish that the underlying gRPC stub method was called. 
assert mock_object.call_count == 1 - await client.list_rollouts(request) + await client.list_deploy_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10958,8 +11089,8 @@ async def test_list_rollouts_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_rollouts_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.ListRolloutsRequest +async def test_list_deploy_policies_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.ListDeployPoliciesRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10971,48 +11102,52 @@ async def test_list_rollouts_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListRolloutsResponse( + cloud_deploy.ListDeployPoliciesResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) ) - response = await client.list_rollouts(request) + response = await client.list_deploy_policies(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.ListRolloutsRequest() + request = cloud_deploy.ListDeployPoliciesRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListRolloutsAsyncPager) + assert isinstance(response, pagers.ListDeployPoliciesAsyncPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_list_rollouts_async_from_dict(): - await test_list_rollouts_async(request_type=dict) +async def test_list_deploy_policies_async_from_dict(): + await test_list_deploy_policies_async(request_type=dict) -def test_list_rollouts_field_headers(): +def test_list_deploy_policies_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.ListRolloutsRequest() + request = cloud_deploy.ListDeployPoliciesRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: - call.return_value = cloud_deploy.ListRolloutsResponse() - client.list_rollouts(request) + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: + call.return_value = cloud_deploy.ListDeployPoliciesResponse() + client.list_deploy_policies(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -11028,23 +11163,25 @@ def test_list_rollouts_field_headers(): @pytest.mark.asyncio -async def test_list_rollouts_field_headers_async(): +async def test_list_deploy_policies_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloud_deploy.ListRolloutsRequest() + request = cloud_deploy.ListDeployPoliciesRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListRolloutsResponse() + cloud_deploy.ListDeployPoliciesResponse() ) - await client.list_rollouts(request) + await client.list_deploy_policies(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11059,18 +11196,20 @@ async def test_list_rollouts_field_headers_async(): ) in kw["metadata"] -def test_list_rollouts_flattened(): +def test_list_deploy_policies_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListRolloutsResponse() + call.return_value = cloud_deploy.ListDeployPoliciesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_rollouts( + client.list_deploy_policies( parent="parent_value", ) @@ -11083,7 +11222,7 @@ def test_list_rollouts_flattened(): assert arg == mock_val -def test_list_rollouts_flattened_error(): +def test_list_deploy_policies_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11091,29 +11230,31 @@ def test_list_rollouts_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_rollouts( - cloud_deploy.ListRolloutsRequest(), + client.list_deploy_policies( + cloud_deploy.ListDeployPoliciesRequest(), parent="parent_value", ) @pytest.mark.asyncio -async def test_list_rollouts_flattened_async(): +async def test_list_deploy_policies_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListRolloutsResponse() + call.return_value = cloud_deploy.ListDeployPoliciesResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListRolloutsResponse() + cloud_deploy.ListDeployPoliciesResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_rollouts( + response = await client.list_deploy_policies( parent="parent_value", ) @@ -11127,7 +11268,7 @@ async def test_list_rollouts_flattened_async(): @pytest.mark.asyncio -async def test_list_rollouts_flattened_error_async(): +async def test_list_deploy_policies_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11135,44 +11276,46 @@ async def test_list_rollouts_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_rollouts( - cloud_deploy.ListRolloutsRequest(), + await client.list_deploy_policies( + cloud_deploy.ListDeployPoliciesRequest(), parent="parent_value", ) -def test_list_rollouts_pager(transport_name: str = "grpc"): +def test_list_deploy_policies_pager(transport_name: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: # Set the response to a series of pages. call.side_effect = ( - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), ], next_page_token="abc", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[], + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[], next_page_token="def", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), ], next_page_token="ghi", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), ], ), RuntimeError, @@ -11182,95 +11325,99 @@ def test_list_rollouts_pager(transport_name: str = "grpc"): expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_rollouts(request={}) + pager = client.list_deploy_policies(request={}) assert pager._metadata == expected_metadata results = 
list(pager) assert len(results) == 6 - assert all(isinstance(i, cloud_deploy.Rollout) for i in results) + assert all(isinstance(i, cloud_deploy.DeployPolicy) for i in results) -def test_list_rollouts_pages(transport_name: str = "grpc"): +def test_list_deploy_policies_pages(transport_name: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + with mock.patch.object( + type(client.transport.list_deploy_policies), "__call__" + ) as call: # Set the response to a series of pages. call.side_effect = ( - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), ], next_page_token="abc", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[], + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[], next_page_token="def", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), ], next_page_token="ghi", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), ], ), RuntimeError, ) - pages = list(client.list_rollouts(request={}).pages) + pages = list(client.list_deploy_policies(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_list_rollouts_async_pager(): +async def test_list_deploy_policies_async_pager(): client = CloudDeployAsyncClient( 
credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_rollouts), "__call__", new_callable=mock.AsyncMock + type(client.transport.list_deploy_policies), + "__call__", + new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. call.side_effect = ( - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), ], next_page_token="abc", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[], + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[], next_page_token="def", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), ], next_page_token="ghi", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), ], ), RuntimeError, ) - async_pager = await client.list_rollouts( + async_pager = await client.list_deploy_policies( request={}, ) assert async_pager.next_page_token == "abc" @@ -11279,43 +11426,45 @@ async def test_list_rollouts_async_pager(): responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, cloud_deploy.Rollout) for i in responses) + assert all(isinstance(i, cloud_deploy.DeployPolicy) for i in responses) @pytest.mark.asyncio -async def test_list_rollouts_async_pages(): +async def test_list_deploy_policies_async_pages(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_rollouts), "__call__", new_callable=mock.AsyncMock + type(client.transport.list_deploy_policies), + "__call__", + new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. call.side_effect = ( - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), ], next_page_token="abc", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[], + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[], next_page_token="def", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), ], next_page_token="ghi", ), - cloud_deploy.ListRolloutsResponse( - rollouts=[ - cloud_deploy.Rollout(), - cloud_deploy.Rollout(), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), ], ), RuntimeError, @@ -11324,7 +11473,7 @@ async def test_list_rollouts_async_pages(): # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 async for page_ in ( # pragma: no branch - await client.list_rollouts(request={}) + await client.list_deploy_policies(request={}) ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -11334,11 +11483,11 @@ async def test_list_rollouts_async_pages(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetRolloutRequest, + cloud_deploy.GetDeployPolicyRequest, dict, ], ) -def test_get_rollout(request_type, transport: str = "grpc"): +def test_get_deploy_policy(request_type, transport: str = "grpc"): client = CloudDeployClient( 
credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11349,52 +11498,35 @@ def test_get_rollout(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Rollout( + call.return_value = cloud_deploy.DeployPolicy( name="name_value", uid="uid_value", description="description_value", - target_id="target_id_value", - approval_state=cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL, - state=cloud_deploy.Rollout.State.SUCCEEDED, - failure_reason="failure_reason_value", - deploying_build="deploying_build_value", + suspended=True, etag="etag_value", - deploy_failure_cause=cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE, - controller_rollout="controller_rollout_value", - rollback_of_rollout="rollback_of_rollout_value", - rolled_back_by_rollouts=["rolled_back_by_rollouts_value"], ) - response = client.get_rollout(request) + response = client.get_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetRolloutRequest() + request = cloud_deploy.GetDeployPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.Rollout) + assert isinstance(response, cloud_deploy.DeployPolicy) assert response.name == "name_value" assert response.uid == "uid_value" assert response.description == "description_value" - assert response.target_id == "target_id_value" - assert response.approval_state == cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL - assert response.state == cloud_deploy.Rollout.State.SUCCEEDED - assert response.failure_reason == "failure_reason_value" - assert response.deploying_build == "deploying_build_value" + assert response.suspended is True assert response.etag == "etag_value" - assert ( - response.deploy_failure_cause - == cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE - ) - assert response.controller_rollout == "controller_rollout_value" - assert response.rollback_of_rollout == "rollback_of_rollout_value" - assert response.rolled_back_by_rollouts == ["rolled_back_by_rollouts_value"] -def test_get_rollout_empty_call(): +def test_get_deploy_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -11403,17 +11535,19 @@ def test_get_rollout_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_rollout() + client.get_deploy_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetRolloutRequest() + assert args[0] == cloud_deploy.GetDeployPolicyRequest() -def test_get_rollout_non_empty_request_with_auto_populated_field(): +def test_get_deploy_policy_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -11424,24 +11558,26 @@ def test_get_rollout_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.GetRolloutRequest( + request = cloud_deploy.GetDeployPolicyRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_rollout(request=request) + client.get_deploy_policy(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetRolloutRequest( + assert args[0] == cloud_deploy.GetDeployPolicyRequest( name="name_value", ) -def test_get_rollout_use_cached_wrapped_rpc(): +def test_get_deploy_policy_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11455,21 +11591,23 @@ def test_get_rollout_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_rollout in client._transport._wrapped_methods + assert client._transport.get_deploy_policy in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_rollout] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_deploy_policy + ] = mock_rpc request = {} - client.get_rollout(request) + client.get_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_rollout(request) + client.get_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11477,7 +11615,7 @@ def test_get_rollout_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_rollout_empty_call_async(): +async def test_get_deploy_policy_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudDeployAsyncClient( @@ -11486,33 +11624,27 @@ async def test_get_rollout_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Rollout( + cloud_deploy.DeployPolicy( name="name_value", uid="uid_value", description="description_value", - target_id="target_id_value", - approval_state=cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL, - state=cloud_deploy.Rollout.State.SUCCEEDED, - failure_reason="failure_reason_value", - deploying_build="deploying_build_value", + suspended=True, etag="etag_value", - deploy_failure_cause=cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE, - controller_rollout="controller_rollout_value", - rollback_of_rollout="rollback_of_rollout_value", - rolled_back_by_rollouts=["rolled_back_by_rollouts_value"], ) ) - response = await client.get_rollout() + response = await client.get_deploy_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetRolloutRequest() + assert args[0] == cloud_deploy.GetDeployPolicyRequest() @pytest.mark.asyncio -async def test_get_rollout_async_use_cached_wrapped_rpc( +async def test_get_deploy_policy_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11529,23 +11661,23 @@ async def test_get_rollout_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_rollout + client._client._transport.get_deploy_policy in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - 
client._client._transport.get_rollout + client._client._transport.get_deploy_policy ] = mock_object request = {} - await client.get_rollout(request) + await client.get_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - await client.get_rollout(request) + await client.get_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11553,8 +11685,8 @@ async def test_get_rollout_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_rollout_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.GetRolloutRequest +async def test_get_deploy_policy_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.GetDeployPolicyRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11566,73 +11698,58 @@ async def test_get_rollout_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Rollout( + cloud_deploy.DeployPolicy( name="name_value", uid="uid_value", description="description_value", - target_id="target_id_value", - approval_state=cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL, - state=cloud_deploy.Rollout.State.SUCCEEDED, - failure_reason="failure_reason_value", - deploying_build="deploying_build_value", + suspended=True, etag="etag_value", - deploy_failure_cause=cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE, - controller_rollout="controller_rollout_value", - rollback_of_rollout="rollback_of_rollout_value", - rolled_back_by_rollouts=["rolled_back_by_rollouts_value"], ) ) - response = await client.get_rollout(request) + response = await client.get_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetRolloutRequest() + request = cloud_deploy.GetDeployPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.Rollout) + assert isinstance(response, cloud_deploy.DeployPolicy) assert response.name == "name_value" assert response.uid == "uid_value" assert response.description == "description_value" - assert response.target_id == "target_id_value" - assert response.approval_state == cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL - assert response.state == cloud_deploy.Rollout.State.SUCCEEDED - assert response.failure_reason == "failure_reason_value" - assert response.deploying_build == "deploying_build_value" + assert response.suspended is True assert response.etag == "etag_value" - assert ( - response.deploy_failure_cause - == cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE - ) - assert response.controller_rollout == "controller_rollout_value" - assert response.rollback_of_rollout == "rollback_of_rollout_value" - assert response.rolled_back_by_rollouts == ["rolled_back_by_rollouts_value"] @pytest.mark.asyncio -async def test_get_rollout_async_from_dict(): - await test_get_rollout_async(request_type=dict) +async def test_get_deploy_policy_async_from_dict(): + await test_get_deploy_policy_async(request_type=dict) -def test_get_rollout_field_headers(): +def test_get_deploy_policy_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.GetRolloutRequest() + request = cloud_deploy.GetDeployPolicyRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: - call.return_value = cloud_deploy.Rollout() - client.get_rollout(request) + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: + call.return_value = cloud_deploy.DeployPolicy() + client.get_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -11648,23 +11765,25 @@ def test_get_rollout_field_headers(): @pytest.mark.asyncio -async def test_get_rollout_field_headers_async(): +async def test_get_deploy_policy_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.GetRolloutRequest() + request = cloud_deploy.GetDeployPolicyRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Rollout() + cloud_deploy.DeployPolicy() ) - await client.get_rollout(request) + await client.get_deploy_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11679,18 +11798,20 @@ async def test_get_rollout_field_headers_async(): ) in kw["metadata"] -def test_get_rollout_flattened(): +def test_get_deploy_policy_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Rollout() + call.return_value = cloud_deploy.DeployPolicy() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_rollout( + client.get_deploy_policy( name="name_value", ) @@ -11703,7 +11824,7 @@ def test_get_rollout_flattened(): assert arg == mock_val -def test_get_rollout_flattened_error(): +def test_get_deploy_policy_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11711,29 +11832,31 @@ def test_get_rollout_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_rollout( - cloud_deploy.GetRolloutRequest(), + client.get_deploy_policy( + cloud_deploy.GetDeployPolicyRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_rollout_flattened_async(): +async def test_get_deploy_policy_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + with mock.patch.object( + type(client.transport.get_deploy_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Rollout() + call.return_value = cloud_deploy.DeployPolicy() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Rollout() + cloud_deploy.DeployPolicy() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_rollout( + response = await client.get_deploy_policy( name="name_value", ) @@ -11747,7 +11870,7 @@ async def test_get_rollout_flattened_async(): @pytest.mark.asyncio -async def test_get_rollout_flattened_error_async(): +async def test_get_deploy_policy_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11755,8 +11878,8 @@ async def test_get_rollout_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_rollout( - cloud_deploy.GetRolloutRequest(), + await client.get_deploy_policy( + cloud_deploy.GetDeployPolicyRequest(), name="name_value", ) @@ -11764,11 +11887,11 @@ async def test_get_rollout_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.CreateRolloutRequest, + cloud_deploy.ApproveRolloutRequest, dict, ], ) -def test_create_rollout(request_type, transport: str = "grpc"): +def test_approve_rollout(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11779,22 +11902,22 @@ def test_create_rollout(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_rollout(request) + call.return_value = cloud_deploy.ApproveRolloutResponse() + response = client.approve_rollout(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.CreateRolloutRequest() + request = cloud_deploy.ApproveRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, cloud_deploy.ApproveRolloutResponse) -def test_create_rollout_empty_call(): +def test_approve_rollout_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -11803,17 +11926,17 @@ def test_create_rollout_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_rollout() + client.approve_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CreateRolloutRequest() + assert args[0] == cloud_deploy.ApproveRolloutRequest() -def test_create_rollout_non_empty_request_with_auto_populated_field(): +def test_approve_rollout_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -11824,30 +11947,24 @@ def test_create_rollout_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = cloud_deploy.CreateRolloutRequest( - parent="parent_value", - rollout_id="rollout_id_value", - request_id="request_id_value", - starting_phase_id="starting_phase_id_value", + request = cloud_deploy.ApproveRolloutRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_rollout(request=request) + client.approve_rollout(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CreateRolloutRequest( - parent="parent_value", - rollout_id="rollout_id_value", - request_id="request_id_value", - starting_phase_id="starting_phase_id_value", + assert args[0] == cloud_deploy.ApproveRolloutRequest( + name="name_value", ) -def test_create_rollout_use_cached_wrapped_rpc(): +def test_approve_rollout_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11861,25 +11978,21 @@ def test_create_rollout_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_rollout in client._transport._wrapped_methods + assert client._transport.approve_rollout in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.create_rollout] = mock_rpc + client._transport._wrapped_methods[client._transport.approve_rollout] = mock_rpc request = {} - client.create_rollout(request) + client.approve_rollout(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_rollout(request) + client.approve_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11887,7 +12000,7 @@ def test_create_rollout_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_rollout_empty_call_async(): +async def test_approve_rollout_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -11896,19 +12009,19 @@ async def test_create_rollout_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.ApproveRolloutResponse() ) - response = await client.create_rollout() + response = await client.approve_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CreateRolloutRequest() + assert args[0] == cloud_deploy.ApproveRolloutRequest() @pytest.mark.asyncio -async def test_create_rollout_async_use_cached_wrapped_rpc( +async def test_approve_rollout_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11925,27 +12038,23 @@ async def test_create_rollout_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_rollout + client._client._transport.approve_rollout in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.create_rollout + client._client._transport.approve_rollout ] = mock_object request = {} - await client.create_rollout(request) + await client.approve_rollout(request) # Establish that the underlying gRPC stub method was called. 
assert mock_object.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_rollout(request) + await client.approve_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11953,8 +12062,8 @@ async def test_create_rollout_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_rollout_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.CreateRolloutRequest +async def test_approve_rollout_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.ApproveRolloutRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11966,43 +12075,43 @@ async def test_create_rollout_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.ApproveRolloutResponse() ) - response = await client.create_rollout(request) + response = await client.approve_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.CreateRolloutRequest() + request = cloud_deploy.ApproveRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, cloud_deploy.ApproveRolloutResponse) @pytest.mark.asyncio -async def test_create_rollout_async_from_dict(): - await test_create_rollout_async(request_type=dict) +async def test_approve_rollout_async_from_dict(): + await test_approve_rollout_async(request_type=dict) -def test_create_rollout_field_headers(): +def test_approve_rollout_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.CreateRolloutRequest() + request = cloud_deploy.ApproveRolloutRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_rollout(request) + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: + call.return_value = cloud_deploy.ApproveRolloutResponse() + client.approve_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12013,28 +12122,28 @@ def test_create_rollout_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_rollout_field_headers_async(): +async def test_approve_rollout_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloud_deploy.CreateRolloutRequest() + request = cloud_deploy.ApproveRolloutRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + cloud_deploy.ApproveRolloutResponse() ) - await client.create_rollout(request) + await client.approve_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -12045,43 +12154,35 @@ async def test_create_rollout_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_rollout_flattened(): +def test_approve_rollout_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = cloud_deploy.ApproveRolloutResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_rollout( - parent="parent_value", - rollout=cloud_deploy.Rollout(name="name_value"), - rollout_id="rollout_id_value", + client.approve_rollout( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].rollout - mock_val = cloud_deploy.Rollout(name="name_value") - assert arg == mock_val - arg = args[0].rollout_id - mock_val = "rollout_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_rollout_flattened_error(): +def test_approve_rollout_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12089,53 +12190,43 @@ def test_create_rollout_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_rollout( - cloud_deploy.CreateRolloutRequest(), - parent="parent_value", - rollout=cloud_deploy.Rollout(name="name_value"), - rollout_id="rollout_id_value", + client.approve_rollout( + cloud_deploy.ApproveRolloutRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_rollout_flattened_async(): +async def test_approve_rollout_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = cloud_deploy.ApproveRolloutResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.ApproveRolloutResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.create_rollout( - parent="parent_value", - rollout=cloud_deploy.Rollout(name="name_value"), - rollout_id="rollout_id_value", + response = await client.approve_rollout( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].rollout - mock_val = cloud_deploy.Rollout(name="name_value") - assert arg == mock_val - arg = args[0].rollout_id - mock_val = "rollout_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_rollout_flattened_error_async(): +async def test_approve_rollout_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12143,22 +12234,20 @@ async def test_create_rollout_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_rollout( - cloud_deploy.CreateRolloutRequest(), - parent="parent_value", - rollout=cloud_deploy.Rollout(name="name_value"), - rollout_id="rollout_id_value", + await client.approve_rollout( + cloud_deploy.ApproveRolloutRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.IgnoreJobRequest, + cloud_deploy.AdvanceRolloutRequest, dict, ], ) -def test_ignore_job(request_type, transport: str = "grpc"): +def test_advance_rollout(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12169,22 +12258,22 @@ def test_ignore_job(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.IgnoreJobResponse() - response = client.ignore_job(request) + call.return_value = cloud_deploy.AdvanceRolloutResponse() + response = client.advance_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.IgnoreJobRequest() + request = cloud_deploy.AdvanceRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.IgnoreJobResponse) + assert isinstance(response, cloud_deploy.AdvanceRolloutResponse) -def test_ignore_job_empty_call(): +def test_advance_rollout_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -12193,17 +12282,17 @@ def test_ignore_job_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.ignore_job() + client.advance_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.IgnoreJobRequest() + assert args[0] == cloud_deploy.AdvanceRolloutRequest() -def test_ignore_job_non_empty_request_with_auto_populated_field(): +def test_advance_rollout_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = CloudDeployClient( @@ -12214,28 +12303,26 @@ def test_ignore_job_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.IgnoreJobRequest( - rollout="rollout_value", + request = cloud_deploy.AdvanceRolloutRequest( + name="name_value", phase_id="phase_id_value", - job_id="job_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.ignore_job(request=request) + client.advance_rollout(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.IgnoreJobRequest( - rollout="rollout_value", + assert args[0] == cloud_deploy.AdvanceRolloutRequest( + name="name_value", phase_id="phase_id_value", - job_id="job_id_value", ) -def test_ignore_job_use_cached_wrapped_rpc(): +def test_advance_rollout_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12249,21 +12336,21 @@ def test_ignore_job_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.ignore_job in client._transport._wrapped_methods + assert client._transport.advance_rollout in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.ignore_job] = mock_rpc + client._transport._wrapped_methods[client._transport.advance_rollout] = mock_rpc request = {} - client.ignore_job(request) + client.advance_rollout(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.ignore_job(request) + client.advance_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12271,7 +12358,7 @@ def test_ignore_job_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_ignore_job_empty_call_async(): +async def test_advance_rollout_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -12280,19 +12367,21 @@ async def test_ignore_job_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.IgnoreJobResponse() + cloud_deploy.AdvanceRolloutResponse() ) - response = await client.ignore_job() + response = await client.advance_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.IgnoreJobRequest() + assert args[0] == cloud_deploy.AdvanceRolloutRequest() @pytest.mark.asyncio -async def test_ignore_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_advance_rollout_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -12307,23 +12396,23 @@ async def test_ignore_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as # Ensure method has been cached assert ( - client._client._transport.ignore_job + client._client._transport.advance_rollout in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.ignore_job + client._client._transport.advance_rollout ] = mock_object request = {} - await client.ignore_job(request) + await client.advance_rollout(request) # Establish that the underlying gRPC stub method was called. 
assert mock_object.call_count == 1 - await client.ignore_job(request) + await client.advance_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12331,8 +12420,8 @@ async def test_ignore_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as @pytest.mark.asyncio -async def test_ignore_job_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.IgnoreJobRequest +async def test_advance_rollout_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.AdvanceRolloutRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12344,43 +12433,43 @@ async def test_ignore_job_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.IgnoreJobResponse() + cloud_deploy.AdvanceRolloutResponse() ) - response = await client.ignore_job(request) + response = await client.advance_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.IgnoreJobRequest() + request = cloud_deploy.AdvanceRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.IgnoreJobResponse) + assert isinstance(response, cloud_deploy.AdvanceRolloutResponse) @pytest.mark.asyncio -async def test_ignore_job_async_from_dict(): - await test_ignore_job_async(request_type=dict) +async def test_advance_rollout_async_from_dict(): + await test_advance_rollout_async(request_type=dict) -def test_ignore_job_field_headers(): +def test_advance_rollout_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.IgnoreJobRequest() + request = cloud_deploy.AdvanceRolloutRequest() - request.rollout = "rollout_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: - call.return_value = cloud_deploy.IgnoreJobResponse() - client.ignore_job(request) + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: + call.return_value = cloud_deploy.AdvanceRolloutResponse() + client.advance_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12391,28 +12480,28 @@ def test_ignore_job_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "rollout=rollout_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_ignore_job_field_headers_async(): +async def test_advance_rollout_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloud_deploy.IgnoreJobRequest() + request = cloud_deploy.AdvanceRolloutRequest() - request.rollout = "rollout_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.IgnoreJobResponse() + cloud_deploy.AdvanceRolloutResponse() ) - await client.ignore_job(request) + await client.advance_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -12423,43 +12512,39 @@ async def test_ignore_job_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "rollout=rollout_value", + "name=name_value", ) in kw["metadata"] -def test_ignore_job_flattened(): +def test_advance_rollout_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.IgnoreJobResponse() + call.return_value = cloud_deploy.AdvanceRolloutResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.ignore_job( - rollout="rollout_value", + client.advance_rollout( + name="name_value", phase_id="phase_id_value", - job_id="job_id_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].rollout - mock_val = "rollout_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val arg = args[0].phase_id mock_val = "phase_id_value" assert arg == mock_val - arg = args[0].job_id - mock_val = "job_id_value" - assert arg == mock_val -def test_ignore_job_flattened_error(): +def test_advance_rollout_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12467,53 +12552,48 @@ def test_ignore_job_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.ignore_job( - cloud_deploy.IgnoreJobRequest(), - rollout="rollout_value", + client.advance_rollout( + cloud_deploy.AdvanceRolloutRequest(), + name="name_value", phase_id="phase_id_value", - job_id="job_id_value", ) @pytest.mark.asyncio -async def test_ignore_job_flattened_async(): +async def test_advance_rollout_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + with mock.patch.object(type(client.transport.advance_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.IgnoreJobResponse() + call.return_value = cloud_deploy.AdvanceRolloutResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.IgnoreJobResponse() + cloud_deploy.AdvanceRolloutResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.ignore_job( - rollout="rollout_value", + response = await client.advance_rollout( + name="name_value", phase_id="phase_id_value", - job_id="job_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].rollout - mock_val = "rollout_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val arg = args[0].phase_id mock_val = "phase_id_value" assert arg == mock_val - arg = args[0].job_id - mock_val = "job_id_value" - assert arg == mock_val @pytest.mark.asyncio -async def test_ignore_job_flattened_error_async(): +async def test_advance_rollout_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12521,22 +12601,21 @@ async def test_ignore_job_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.ignore_job( - cloud_deploy.IgnoreJobRequest(), - rollout="rollout_value", + await client.advance_rollout( + cloud_deploy.AdvanceRolloutRequest(), + name="name_value", phase_id="phase_id_value", - job_id="job_id_value", ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.RetryJobRequest, + cloud_deploy.CancelRolloutRequest, dict, ], ) -def test_retry_job(request_type, transport: str = "grpc"): +def test_cancel_rollout(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12547,22 +12626,22 @@ def test_retry_job(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.RetryJobResponse() - response = client.retry_job(request) + call.return_value = cloud_deploy.CancelRolloutResponse() + response = client.cancel_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.RetryJobRequest() + request = cloud_deploy.CancelRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.RetryJobResponse) + assert isinstance(response, cloud_deploy.CancelRolloutResponse) -def test_retry_job_empty_call(): +def test_cancel_rollout_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -12571,17 +12650,17 @@ def test_retry_job_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.retry_job() + client.cancel_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.RetryJobRequest() + assert args[0] == cloud_deploy.CancelRolloutRequest() -def test_retry_job_non_empty_request_with_auto_populated_field(): +def test_cancel_rollout_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = CloudDeployClient( @@ -12592,28 +12671,24 @@ def test_retry_job_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.RetryJobRequest( - rollout="rollout_value", - phase_id="phase_id_value", - job_id="job_id_value", + request = cloud_deploy.CancelRolloutRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.retry_job(request=request) + client.cancel_rollout(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.RetryJobRequest( - rollout="rollout_value", - phase_id="phase_id_value", - job_id="job_id_value", + assert args[0] == cloud_deploy.CancelRolloutRequest( + name="name_value", ) -def test_retry_job_use_cached_wrapped_rpc(): +def test_cancel_rollout_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12627,21 +12702,21 @@ def test_retry_job_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.retry_job in client._transport._wrapped_methods + assert client._transport.cancel_rollout in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.retry_job] = mock_rpc + client._transport._wrapped_methods[client._transport.cancel_rollout] = mock_rpc request = {} - client.retry_job(request) + client.cancel_rollout(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.retry_job(request) + client.cancel_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12649,7 +12724,7 @@ def test_retry_job_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_retry_job_empty_call_async(): +async def test_cancel_rollout_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -12658,19 +12733,21 @@ async def test_retry_job_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.RetryJobResponse() + cloud_deploy.CancelRolloutResponse() ) - response = await client.retry_job() + response = await client.cancel_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.RetryJobRequest() + assert args[0] == cloud_deploy.CancelRolloutRequest() @pytest.mark.asyncio -async def test_retry_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_cancel_rollout_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -12685,23 +12762,23 @@ async def test_retry_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asy # Ensure method has been cached assert ( - client._client._transport.retry_job + client._client._transport.cancel_rollout in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.retry_job + client._client._transport.cancel_rollout ] = mock_object request = {} - await client.retry_job(request) + await client.cancel_rollout(request) # Establish that the underlying gRPC stub method was called. 
assert mock_object.call_count == 1 - await client.retry_job(request) + await client.cancel_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12709,8 +12786,8 @@ async def test_retry_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asy @pytest.mark.asyncio -async def test_retry_job_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.RetryJobRequest +async def test_cancel_rollout_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.CancelRolloutRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12722,43 +12799,43 @@ async def test_retry_job_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.RetryJobResponse() + cloud_deploy.CancelRolloutResponse() ) - response = await client.retry_job(request) + response = await client.cancel_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.RetryJobRequest() + request = cloud_deploy.CancelRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.RetryJobResponse) + assert isinstance(response, cloud_deploy.CancelRolloutResponse) @pytest.mark.asyncio -async def test_retry_job_async_from_dict(): - await test_retry_job_async(request_type=dict) +async def test_cancel_rollout_async_from_dict(): + await test_cancel_rollout_async(request_type=dict) -def test_retry_job_field_headers(): +def test_cancel_rollout_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.RetryJobRequest() + request = cloud_deploy.CancelRolloutRequest() - request.rollout = "rollout_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_job), "__call__") as call: - call.return_value = cloud_deploy.RetryJobResponse() - client.retry_job(request) + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: + call.return_value = cloud_deploy.CancelRolloutResponse() + client.cancel_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12769,28 +12846,28 @@ def test_retry_job_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "rollout=rollout_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_retry_job_field_headers_async(): +async def test_cancel_rollout_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloud_deploy.RetryJobRequest() + request = cloud_deploy.CancelRolloutRequest() - request.rollout = "rollout_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.RetryJobResponse() + cloud_deploy.CancelRolloutResponse() ) - await client.retry_job(request) + await client.cancel_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -12801,43 +12878,35 @@ async def test_retry_job_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "rollout=rollout_value", + "name=name_value", ) in kw["metadata"] -def test_retry_job_flattened(): +def test_cancel_rollout_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.RetryJobResponse() + call.return_value = cloud_deploy.CancelRolloutResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.retry_job( - rollout="rollout_value", - phase_id="phase_id_value", - job_id="job_id_value", + client.cancel_rollout( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].rollout - mock_val = "rollout_value" - assert arg == mock_val - arg = args[0].phase_id - mock_val = "phase_id_value" - assert arg == mock_val - arg = args[0].job_id - mock_val = "job_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_retry_job_flattened_error(): +def test_cancel_rollout_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12845,53 +12914,43 @@ def test_retry_job_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.retry_job( - cloud_deploy.RetryJobRequest(), - rollout="rollout_value", - phase_id="phase_id_value", - job_id="job_id_value", + client.cancel_rollout( + cloud_deploy.CancelRolloutRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_retry_job_flattened_async(): +async def test_cancel_rollout_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.RetryJobResponse() + call.return_value = cloud_deploy.CancelRolloutResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.RetryJobResponse() + cloud_deploy.CancelRolloutResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.retry_job( - rollout="rollout_value", - phase_id="phase_id_value", - job_id="job_id_value", + response = await client.cancel_rollout( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].rollout - mock_val = "rollout_value" - assert arg == mock_val - arg = args[0].phase_id - mock_val = "phase_id_value" - assert arg == mock_val - arg = args[0].job_id - mock_val = "job_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_retry_job_flattened_error_async(): +async def test_cancel_rollout_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12899,22 +12958,20 @@ async def test_retry_job_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.retry_job( - cloud_deploy.RetryJobRequest(), - rollout="rollout_value", - phase_id="phase_id_value", - job_id="job_id_value", + await client.cancel_rollout( + cloud_deploy.CancelRolloutRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ListJobRunsRequest, + cloud_deploy.ListRolloutsRequest, dict, ], ) -def test_list_job_runs(request_type, transport: str = "grpc"): +def test_list_rollouts(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12925,27 +12982,27 @@ def test_list_job_runs(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListJobRunsResponse( + call.return_value = cloud_deploy.ListRolloutsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) - response = client.list_job_runs(request) + response = client.list_rollouts(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.ListJobRunsRequest() + request = cloud_deploy.ListRolloutsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobRunsPager) + assert isinstance(response, pagers.ListRolloutsPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] -def test_list_job_runs_empty_call(): +def test_list_rollouts_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -12954,17 +13011,17 @@ def test_list_job_runs_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_job_runs() + client.list_rollouts() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListJobRunsRequest() + assert args[0] == cloud_deploy.ListRolloutsRequest() -def test_list_job_runs_non_empty_request_with_auto_populated_field(): +def test_list_rollouts_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -12975,7 +13032,7 @@ def test_list_job_runs_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.ListJobRunsRequest( + request = cloud_deploy.ListRolloutsRequest( parent="parent_value", page_token="page_token_value", filter="filter_value", @@ -12983,14 +13040,14 @@ def test_list_job_runs_non_empty_request_with_auto_populated_field(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_job_runs(request=request) + client.list_rollouts(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListJobRunsRequest( + assert args[0] == cloud_deploy.ListRolloutsRequest( parent="parent_value", page_token="page_token_value", filter="filter_value", @@ -12998,7 +13055,7 @@ def test_list_job_runs_non_empty_request_with_auto_populated_field(): ) -def test_list_job_runs_use_cached_wrapped_rpc(): +def test_list_rollouts_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13012,21 +13069,21 @@ def test_list_job_runs_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_job_runs in client._transport._wrapped_methods + assert client._transport.list_rollouts in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_job_runs] = mock_rpc + client._transport._wrapped_methods[client._transport.list_rollouts] = mock_rpc request = {} - client.list_job_runs(request) + client.list_rollouts(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_job_runs(request) + client.list_rollouts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13034,7 +13091,7 @@ def test_list_job_runs_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_job_runs_empty_call_async(): +async def test_list_rollouts_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudDeployAsyncClient( @@ -13043,22 +13100,22 @@ async def test_list_job_runs_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListJobRunsResponse( + cloud_deploy.ListRolloutsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) ) - response = await client.list_job_runs() + response = await client.list_rollouts() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListJobRunsRequest() + assert args[0] == cloud_deploy.ListRolloutsRequest() @pytest.mark.asyncio -async def test_list_job_runs_async_use_cached_wrapped_rpc( +async def test_list_rollouts_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -13075,23 +13132,23 @@ async def test_list_job_runs_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_job_runs + client._client._transport.list_rollouts in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.list_job_runs + client._client._transport.list_rollouts ] = mock_object request = {} - await client.list_job_runs(request) + await client.list_rollouts(request) # Establish that the underlying gRPC stub method was called. 
assert mock_object.call_count == 1 - await client.list_job_runs(request) + await client.list_rollouts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13099,8 +13156,8 @@ async def test_list_job_runs_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_job_runs_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.ListJobRunsRequest +async def test_list_rollouts_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.ListRolloutsRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13112,48 +13169,48 @@ async def test_list_job_runs_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListJobRunsResponse( + cloud_deploy.ListRolloutsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) ) - response = await client.list_job_runs(request) + response = await client.list_rollouts(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.ListJobRunsRequest() + request = cloud_deploy.ListRolloutsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListJobRunsAsyncPager) + assert isinstance(response, pagers.ListRolloutsAsyncPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_list_job_runs_async_from_dict(): - await test_list_job_runs_async(request_type=dict) +async def test_list_rollouts_async_from_dict(): + await test_list_rollouts_async(request_type=dict) -def test_list_job_runs_field_headers(): +def test_list_rollouts_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.ListJobRunsRequest() + request = cloud_deploy.ListRolloutsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: - call.return_value = cloud_deploy.ListJobRunsResponse() - client.list_job_runs(request) + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: + call.return_value = cloud_deploy.ListRolloutsResponse() + client.list_rollouts(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -13169,23 +13226,23 @@ def test_list_job_runs_field_headers(): @pytest.mark.asyncio -async def test_list_job_runs_field_headers_async(): +async def test_list_rollouts_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.ListJobRunsRequest() + request = cloud_deploy.ListRolloutsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListJobRunsResponse() + cloud_deploy.ListRolloutsResponse() ) - await client.list_job_runs(request) + await client.list_rollouts(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -13200,18 +13257,18 @@ async def test_list_job_runs_field_headers_async(): ) in kw["metadata"] -def test_list_job_runs_flattened(): +def test_list_rollouts_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListJobRunsResponse() + call.return_value = cloud_deploy.ListRolloutsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_job_runs( + client.list_rollouts( parent="parent_value", ) @@ -13224,7 +13281,7 @@ def test_list_job_runs_flattened(): assert arg == mock_val -def test_list_job_runs_flattened_error(): +def test_list_rollouts_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13232,29 +13289,29 @@ def test_list_job_runs_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_job_runs( - cloud_deploy.ListJobRunsRequest(), + client.list_rollouts( + cloud_deploy.ListRolloutsRequest(), parent="parent_value", ) @pytest.mark.asyncio -async def test_list_job_runs_flattened_async(): +async def test_list_rollouts_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListJobRunsResponse() + call.return_value = cloud_deploy.ListRolloutsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListJobRunsResponse() + cloud_deploy.ListRolloutsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_job_runs( + response = await client.list_rollouts( parent="parent_value", ) @@ -13268,7 +13325,7 @@ async def test_list_job_runs_flattened_async(): @pytest.mark.asyncio -async def test_list_job_runs_flattened_error_async(): +async def test_list_rollouts_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13276,44 +13333,44 @@ async def test_list_job_runs_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_job_runs( - cloud_deploy.ListJobRunsRequest(), + await client.list_rollouts( + cloud_deploy.ListRolloutsRequest(), parent="parent_value", ) -def test_list_job_runs_pager(transport_name: str = "grpc"): +def test_list_rollouts_pager(transport_name: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), ], next_page_token="abc", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[], + cloud_deploy.ListRolloutsResponse( + rollouts=[], next_page_token="def", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), ], next_page_token="ghi", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), ], ), RuntimeError, @@ -13323,95 +13380,95 @@ def test_list_job_runs_pager(transport_name: str = "grpc"): expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_job_runs(request={}) + pager = client.list_rollouts(request={}) assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 - assert all(isinstance(i, cloud_deploy.JobRun) for i in results) + assert all(isinstance(i, 
cloud_deploy.Rollout) for i in results) -def test_list_job_runs_pages(transport_name: str = "grpc"): +def test_list_rollouts_pages(transport_name: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), ], next_page_token="abc", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[], + cloud_deploy.ListRolloutsResponse( + rollouts=[], next_page_token="def", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), ], next_page_token="ghi", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), ], ), RuntimeError, ) - pages = list(client.list_job_runs(request={}).pages) + pages = list(client.list_rollouts(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_list_job_runs_async_pager(): +async def test_list_rollouts_async_pager(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_job_runs), "__call__", new_callable=mock.AsyncMock + type(client.transport.list_rollouts), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), ], next_page_token="abc", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[], + cloud_deploy.ListRolloutsResponse( + rollouts=[], next_page_token="def", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), ], next_page_token="ghi", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), ], ), RuntimeError, ) - async_pager = await client.list_job_runs( + async_pager = await client.list_rollouts( request={}, ) assert async_pager.next_page_token == "abc" @@ -13420,43 +13477,43 @@ async def test_list_job_runs_async_pager(): responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, cloud_deploy.JobRun) for i in responses) + assert all(isinstance(i, cloud_deploy.Rollout) for i in responses) @pytest.mark.asyncio -async def test_list_job_runs_async_pages(): +async def test_list_rollouts_async_pages(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_job_runs), "__call__", new_callable=mock.AsyncMock + type(client.transport.list_rollouts), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. 
call.side_effect = ( - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), ], next_page_token="abc", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[], + cloud_deploy.ListRolloutsResponse( + rollouts=[], next_page_token="def", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), ], next_page_token="ghi", ), - cloud_deploy.ListJobRunsResponse( - job_runs=[ - cloud_deploy.JobRun(), - cloud_deploy.JobRun(), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), ], ), RuntimeError, @@ -13465,7 +13522,7 @@ async def test_list_job_runs_async_pages(): # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 async for page_ in ( # pragma: no branch - await client.list_job_runs(request={}) + await client.list_rollouts(request={}) ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -13475,11 +13532,11 @@ async def test_list_job_runs_async_pages(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetJobRunRequest, + cloud_deploy.GetRolloutRequest, dict, ], ) -def test_get_job_run(request_type, transport: str = "grpc"): +def test_get_rollout(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13490,35 +13547,52 @@ def test_get_job_run(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.JobRun( + call.return_value = cloud_deploy.Rollout( name="name_value", uid="uid_value", - phase_id="phase_id_value", - job_id="job_id_value", - state=cloud_deploy.JobRun.State.IN_PROGRESS, + description="description_value", + target_id="target_id_value", + approval_state=cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL, + state=cloud_deploy.Rollout.State.SUCCEEDED, + failure_reason="failure_reason_value", + deploying_build="deploying_build_value", etag="etag_value", + deploy_failure_cause=cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE, + controller_rollout="controller_rollout_value", + rollback_of_rollout="rollback_of_rollout_value", + rolled_back_by_rollouts=["rolled_back_by_rollouts_value"], ) - response = client.get_job_run(request) + response = client.get_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetJobRunRequest() + request = cloud_deploy.GetRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.JobRun) + assert isinstance(response, cloud_deploy.Rollout) assert response.name == "name_value" assert response.uid == "uid_value" - assert response.phase_id == "phase_id_value" - assert response.job_id == "job_id_value" - assert response.state == cloud_deploy.JobRun.State.IN_PROGRESS + assert response.description == "description_value" + assert response.target_id == "target_id_value" + assert response.approval_state == cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL + assert response.state == cloud_deploy.Rollout.State.SUCCEEDED + assert response.failure_reason == "failure_reason_value" + assert response.deploying_build == "deploying_build_value" assert response.etag == "etag_value" + assert ( + response.deploy_failure_cause + == cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE + ) + assert response.controller_rollout == "controller_rollout_value" + assert response.rollback_of_rollout == "rollback_of_rollout_value" + assert response.rolled_back_by_rollouts == ["rolled_back_by_rollouts_value"] -def test_get_job_run_empty_call(): +def test_get_rollout_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -13527,17 +13601,17 @@ def test_get_job_run_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_job_run() + client.get_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetJobRunRequest() + assert args[0] == cloud_deploy.GetRolloutRequest() -def test_get_job_run_non_empty_request_with_auto_populated_field(): +def test_get_rollout_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -13548,24 +13622,24 @@ def test_get_job_run_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.GetJobRunRequest( + request = cloud_deploy.GetRolloutRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_job_run(request=request) + client.get_rollout(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetJobRunRequest( + assert args[0] == cloud_deploy.GetRolloutRequest( name="name_value", ) -def test_get_job_run_use_cached_wrapped_rpc(): +def test_get_rollout_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13579,21 +13653,21 @@ def test_get_job_run_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_job_run in client._transport._wrapped_methods + assert client._transport.get_rollout in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_job_run] = mock_rpc + client._transport._wrapped_methods[client._transport.get_rollout] = mock_rpc request = {} - client.get_job_run(request) + client.get_rollout(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_job_run(request) + client.get_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13601,7 +13675,7 @@ def test_get_job_run_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_job_run_empty_call_async(): +async def test_get_rollout_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -13610,26 +13684,33 @@ async def test_get_job_run_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.JobRun( + cloud_deploy.Rollout( name="name_value", uid="uid_value", - phase_id="phase_id_value", - job_id="job_id_value", - state=cloud_deploy.JobRun.State.IN_PROGRESS, + description="description_value", + target_id="target_id_value", + approval_state=cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL, + state=cloud_deploy.Rollout.State.SUCCEEDED, + failure_reason="failure_reason_value", + deploying_build="deploying_build_value", etag="etag_value", + deploy_failure_cause=cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE, + controller_rollout="controller_rollout_value", + rollback_of_rollout="rollback_of_rollout_value", + rolled_back_by_rollouts=["rolled_back_by_rollouts_value"], ) ) - response = await client.get_job_run() + response = await client.get_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetJobRunRequest() + assert args[0] == cloud_deploy.GetRolloutRequest() @pytest.mark.asyncio -async def test_get_job_run_async_use_cached_wrapped_rpc( +async def test_get_rollout_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -13646,23 +13727,23 @@ async def test_get_job_run_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_job_run + client._client._transport.get_rollout in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.get_job_run + client._client._transport.get_rollout ] = mock_object request = {} - await client.get_job_run(request) + 
await client.get_rollout(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - await client.get_job_run(request) + await client.get_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13670,8 +13751,8 @@ async def test_get_job_run_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_job_run_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.GetJobRunRequest +async def test_get_rollout_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.GetRolloutRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13683,56 +13764,73 @@ async def test_get_job_run_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.JobRun( + cloud_deploy.Rollout( name="name_value", uid="uid_value", - phase_id="phase_id_value", - job_id="job_id_value", - state=cloud_deploy.JobRun.State.IN_PROGRESS, + description="description_value", + target_id="target_id_value", + approval_state=cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL, + state=cloud_deploy.Rollout.State.SUCCEEDED, + failure_reason="failure_reason_value", + deploying_build="deploying_build_value", etag="etag_value", + deploy_failure_cause=cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE, + controller_rollout="controller_rollout_value", + rollback_of_rollout="rollback_of_rollout_value", + rolled_back_by_rollouts=["rolled_back_by_rollouts_value"], ) ) - response = await client.get_job_run(request) + response = await client.get_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetJobRunRequest() + request = cloud_deploy.GetRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.JobRun) + assert isinstance(response, cloud_deploy.Rollout) assert response.name == "name_value" assert response.uid == "uid_value" - assert response.phase_id == "phase_id_value" - assert response.job_id == "job_id_value" - assert response.state == cloud_deploy.JobRun.State.IN_PROGRESS + assert response.description == "description_value" + assert response.target_id == "target_id_value" + assert response.approval_state == cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL + assert response.state == cloud_deploy.Rollout.State.SUCCEEDED + assert response.failure_reason == "failure_reason_value" + assert response.deploying_build == "deploying_build_value" assert response.etag == "etag_value" + assert ( + response.deploy_failure_cause + == cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE + ) + assert response.controller_rollout == "controller_rollout_value" + assert response.rollback_of_rollout == "rollback_of_rollout_value" + assert response.rolled_back_by_rollouts == ["rolled_back_by_rollouts_value"] @pytest.mark.asyncio -async def test_get_job_run_async_from_dict(): - await test_get_job_run_async(request_type=dict) +async def test_get_rollout_async_from_dict(): + await test_get_rollout_async(request_type=dict) -def test_get_job_run_field_headers(): +def test_get_rollout_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.GetJobRunRequest() + request = cloud_deploy.GetRolloutRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: - call.return_value = cloud_deploy.JobRun() - client.get_job_run(request) + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + call.return_value = cloud_deploy.Rollout() + client.get_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -13748,21 +13846,23 @@ def test_get_job_run_field_headers(): @pytest.mark.asyncio -async def test_get_job_run_field_headers_async(): +async def test_get_rollout_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.GetJobRunRequest() + request = cloud_deploy.GetRolloutRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.JobRun()) - await client.get_job_run(request) + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.Rollout() + ) + await client.get_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -13777,18 +13877,18 @@ async def test_get_job_run_field_headers_async(): ) in kw["metadata"] -def test_get_job_run_flattened(): +def test_get_rollout_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.JobRun() + call.return_value = cloud_deploy.Rollout() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_job_run( + client.get_rollout( name="name_value", ) @@ -13801,7 +13901,7 @@ def test_get_job_run_flattened(): assert arg == mock_val -def test_get_job_run_flattened_error(): +def test_get_rollout_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13809,27 +13909,29 @@ def test_get_job_run_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_job_run( - cloud_deploy.GetJobRunRequest(), + client.get_rollout( + cloud_deploy.GetRolloutRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_job_run_flattened_async(): +async def test_get_rollout_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.JobRun() + call.return_value = cloud_deploy.Rollout() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.JobRun()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.Rollout() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_job_run( + response = await client.get_rollout( name="name_value", ) @@ -13843,7 +13945,7 @@ async def test_get_job_run_flattened_async(): @pytest.mark.asyncio -async def test_get_job_run_flattened_error_async(): +async def test_get_rollout_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13851,8 +13953,8 @@ async def test_get_job_run_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_job_run( - cloud_deploy.GetJobRunRequest(), + await client.get_rollout( + cloud_deploy.GetRolloutRequest(), name="name_value", ) @@ -13860,11 +13962,11 @@ async def test_get_job_run_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.TerminateJobRunRequest, + cloud_deploy.CreateRolloutRequest, dict, ], ) -def test_terminate_job_run(request_type, transport: str = "grpc"): +def test_create_rollout(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13875,24 +13977,22 @@ def test_terminate_job_run(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.TerminateJobRunResponse() - response = client.terminate_job_run(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_rollout(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.TerminateJobRunRequest() + request = cloud_deploy.CreateRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.TerminateJobRunResponse) + assert isinstance(response, future.Future) -def test_terminate_job_run_empty_call(): +def test_create_rollout_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -13901,19 +14001,17 @@ def test_terminate_job_run_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.terminate_job_run() + client.create_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.TerminateJobRunRequest() + assert args[0] == cloud_deploy.CreateRolloutRequest() -def test_terminate_job_run_non_empty_request_with_auto_populated_field(): +def test_create_rollout_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -13924,26 +14022,30 @@ def test_terminate_job_run_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = cloud_deploy.TerminateJobRunRequest( - name="name_value", + request = cloud_deploy.CreateRolloutRequest( + parent="parent_value", + rollout_id="rollout_id_value", + request_id="request_id_value", + starting_phase_id="starting_phase_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.terminate_job_run(request=request) + client.create_rollout(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.TerminateJobRunRequest( - name="name_value", + assert args[0] == cloud_deploy.CreateRolloutRequest( + parent="parent_value", + rollout_id="rollout_id_value", + request_id="request_id_value", + starting_phase_id="starting_phase_id_value", ) -def test_terminate_job_run_use_cached_wrapped_rpc(): +def test_create_rollout_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13957,23 +14059,25 @@ def test_terminate_job_run_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.terminate_job_run in client._transport._wrapped_methods + assert client._transport.create_rollout in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.terminate_job_run - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_rollout] = mock_rpc request = {} - client.terminate_job_run(request) + client.create_rollout(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.terminate_job_run(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13981,7 +14085,7 @@ def test_terminate_job_run_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_terminate_job_run_empty_call_async(): +async def test_create_rollout_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -13990,21 +14094,19 @@ async def test_terminate_job_run_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.TerminateJobRunResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.terminate_job_run() + response = await client.create_rollout() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.TerminateJobRunRequest() + assert args[0] == cloud_deploy.CreateRolloutRequest() @pytest.mark.asyncio -async def test_terminate_job_run_async_use_cached_wrapped_rpc( +async def test_create_rollout_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -14021,23 +14123,27 @@ async def test_terminate_job_run_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.terminate_job_run + client._client._transport.create_rollout in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.terminate_job_run + client._client._transport.create_rollout ] = mock_object request = {} - await client.terminate_job_run(request) + await client.create_rollout(request) # Establish that the underlying gRPC stub method was called. 
assert mock_object.call_count == 1 - await client.terminate_job_run(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_rollout(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14045,8 +14151,8 @@ async def test_terminate_job_run_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_terminate_job_run_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.TerminateJobRunRequest +async def test_create_rollout_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.CreateRolloutRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14058,47 +14164,43 @@ async def test_terminate_job_run_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.TerminateJobRunResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.terminate_job_run(request) + response = await client.create_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.TerminateJobRunRequest() + request = cloud_deploy.CreateRolloutRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.TerminateJobRunResponse) + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_terminate_job_run_async_from_dict(): - await test_terminate_job_run_async(request_type=dict) +async def test_create_rollout_async_from_dict(): + await test_create_rollout_async(request_type=dict) -def test_terminate_job_run_field_headers(): +def test_create_rollout_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.TerminateJobRunRequest() + request = cloud_deploy.CreateRolloutRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: - call.return_value = cloud_deploy.TerminateJobRunResponse() - client.terminate_job_run(request) + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -14109,30 +14211,28 @@ def test_terminate_job_run_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_terminate_job_run_field_headers_async(): +async def test_create_rollout_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloud_deploy.TerminateJobRunRequest() + request = cloud_deploy.CreateRolloutRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.TerminateJobRunResponse() + operations_pb2.Operation(name="operations/op") ) - await client.terminate_job_run(request) + await client.create_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -14143,37 +14243,43 @@ async def test_terminate_job_run_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_terminate_job_run_flattened(): +def test_create_rollout_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.TerminateJobRunResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.terminate_job_run( - name="name_value", + client.create_rollout( + parent="parent_value", + rollout=cloud_deploy.Rollout(name="name_value"), + rollout_id="rollout_id_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].rollout + mock_val = cloud_deploy.Rollout(name="name_value") + assert arg == mock_val + arg = args[0].rollout_id + mock_val = "rollout_id_value" assert arg == mock_val -def test_terminate_job_run_flattened_error(): +def test_create_rollout_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14181,45 +14287,53 @@ def test_terminate_job_run_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.terminate_job_run( - cloud_deploy.TerminateJobRunRequest(), - name="name_value", + client.create_rollout( + cloud_deploy.CreateRolloutRequest(), + parent="parent_value", + rollout=cloud_deploy.Rollout(name="name_value"), + rollout_id="rollout_id_value", ) @pytest.mark.asyncio -async def test_terminate_job_run_flattened_async(): +async def test_create_rollout_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.terminate_job_run), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.TerminateJobRunResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.TerminateJobRunResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.terminate_job_run( - name="name_value", + response = await client.create_rollout( + parent="parent_value", + rollout=cloud_deploy.Rollout(name="name_value"), + rollout_id="rollout_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].rollout + mock_val = cloud_deploy.Rollout(name="name_value") + assert arg == mock_val + arg = args[0].rollout_id + mock_val = "rollout_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_terminate_job_run_flattened_error_async(): +async def test_create_rollout_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14227,20 +14341,22 @@ async def test_terminate_job_run_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.terminate_job_run( - cloud_deploy.TerminateJobRunRequest(), - name="name_value", + await client.create_rollout( + cloud_deploy.CreateRolloutRequest(), + parent="parent_value", + rollout=cloud_deploy.Rollout(name="name_value"), + rollout_id="rollout_id_value", ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetConfigRequest, + cloud_deploy.IgnoreJobRequest, dict, ], ) -def test_get_config(request_type, transport: str = "grpc"): +def test_ignore_job(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14251,27 +14367,22 @@ def test_get_config(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_config), "__call__") as call: + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Config( - name="name_value", - default_skaffold_version="default_skaffold_version_value", - ) - response = client.get_config(request) + call.return_value = cloud_deploy.IgnoreJobResponse() + response = client.ignore_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetConfigRequest() + request = cloud_deploy.IgnoreJobRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.Config) - assert response.name == "name_value" - assert response.default_skaffold_version == "default_skaffold_version_value" + assert isinstance(response, cloud_deploy.IgnoreJobResponse) -def test_get_config_empty_call(): +def test_ignore_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -14280,17 +14391,17 @@ def test_get_config_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_config), "__call__") as call: + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_config() + client.ignore_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetConfigRequest() + assert args[0] == cloud_deploy.IgnoreJobRequest() -def test_get_config_non_empty_request_with_auto_populated_field(): +def test_ignore_job_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -14301,24 +14412,28 @@ def test_get_config_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.GetConfigRequest( - name="name_value", + request = cloud_deploy.IgnoreJobRequest( + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_config), "__call__") as call: + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_config(request=request) + client.ignore_job(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetConfigRequest( - name="name_value", + assert args[0] == cloud_deploy.IgnoreJobRequest( + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) -def test_get_config_use_cached_wrapped_rpc(): +def test_ignore_job_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14332,21 +14447,21 @@ def test_get_config_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_config in client._transport._wrapped_methods + assert client._transport.ignore_job in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_config] = mock_rpc + client._transport._wrapped_methods[client._transport.ignore_job] = mock_rpc request = {} - client.get_config(request) + client.ignore_job(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_config(request) + client.ignore_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14354,7 +14469,7 @@ def test_get_config_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_config_empty_call_async(): +async def test_ignore_job_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudDeployAsyncClient( @@ -14363,22 +14478,19 @@ async def test_get_config_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_config), "__call__") as call: + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Config( - name="name_value", - default_skaffold_version="default_skaffold_version_value", - ) + cloud_deploy.IgnoreJobResponse() ) - response = await client.get_config() + response = await client.ignore_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetConfigRequest() + assert args[0] == cloud_deploy.IgnoreJobRequest() @pytest.mark.asyncio -async def test_get_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_ignore_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -14393,23 +14505,23 @@ async def test_get_config_async_use_cached_wrapped_rpc(transport: str = "grpc_as # Ensure method has been cached assert ( - client._client._transport.get_config + client._client._transport.ignore_job in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.get_config + client._client._transport.ignore_job ] = mock_object request = {} - await client.get_config(request) + await client.ignore_job(request) # Establish that the underlying gRPC stub method was called. 
assert mock_object.call_count == 1 - await client.get_config(request) + await client.ignore_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14417,8 +14529,8 @@ async def test_get_config_async_use_cached_wrapped_rpc(transport: str = "grpc_as @pytest.mark.asyncio -async def test_get_config_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.GetConfigRequest +async def test_ignore_job_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.IgnoreJobRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14430,48 +14542,43 @@ async def test_get_config_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_config), "__call__") as call: + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Config( - name="name_value", - default_skaffold_version="default_skaffold_version_value", - ) + cloud_deploy.IgnoreJobResponse() ) - response = await client.get_config(request) + response = await client.ignore_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetConfigRequest() + request = cloud_deploy.IgnoreJobRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.Config) - assert response.name == "name_value" - assert response.default_skaffold_version == "default_skaffold_version_value" + assert isinstance(response, cloud_deploy.IgnoreJobResponse) @pytest.mark.asyncio -async def test_get_config_async_from_dict(): - await test_get_config_async(request_type=dict) +async def test_ignore_job_async_from_dict(): + await test_ignore_job_async(request_type=dict) -def test_get_config_field_headers(): +def test_ignore_job_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.GetConfigRequest() + request = cloud_deploy.IgnoreJobRequest() - request.name = "name_value" + request.rollout = "rollout_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_config), "__call__") as call: - call.return_value = cloud_deploy.Config() - client.get_config(request) + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + call.return_value = cloud_deploy.IgnoreJobResponse() + client.ignore_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -14482,26 +14589,28 @@ def test_get_config_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "rollout=rollout_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_config_field_headers_async(): +async def test_ignore_job_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloud_deploy.GetConfigRequest() + request = cloud_deploy.IgnoreJobRequest() - request.name = "name_value" + request.rollout = "rollout_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_config), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.Config()) - await client.get_config(request) + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.IgnoreJobResponse() + ) + await client.ignore_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -14512,35 +14621,43 @@ async def test_get_config_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "rollout=rollout_value", ) in kw["metadata"] -def test_get_config_flattened(): +def test_ignore_job_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_config), "__call__") as call: + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Config() + call.return_value = cloud_deploy.IgnoreJobResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_config( - name="name_value", + client.ignore_job( + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].rollout + mock_val = "rollout_value" + assert arg == mock_val + arg = args[0].phase_id + mock_val = "phase_id_value" + assert arg == mock_val + arg = args[0].job_id + mock_val = "job_id_value" assert arg == mock_val -def test_get_config_flattened_error(): +def test_ignore_job_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14548,41 +14665,53 @@ def test_get_config_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_config( - cloud_deploy.GetConfigRequest(), - name="name_value", + client.ignore_job( + cloud_deploy.IgnoreJobRequest(), + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) @pytest.mark.asyncio -async def test_get_config_flattened_async(): +async def test_ignore_job_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_config), "__call__") as call: + with mock.patch.object(type(client.transport.ignore_job), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Config() + call.return_value = cloud_deploy.IgnoreJobResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.Config()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.IgnoreJobResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_config( - name="name_value", + response = await client.ignore_job( + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].rollout + mock_val = "rollout_value" + assert arg == mock_val + arg = args[0].phase_id + mock_val = "phase_id_value" + assert arg == mock_val + arg = args[0].job_id + mock_val = "job_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_config_flattened_error_async(): +async def test_ignore_job_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14590,20 +14719,22 @@ async def test_get_config_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_config( - cloud_deploy.GetConfigRequest(), - name="name_value", + await client.ignore_job( + cloud_deploy.IgnoreJobRequest(), + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.CreateAutomationRequest, + cloud_deploy.RetryJobRequest, dict, ], ) -def test_create_automation(request_type, transport: str = "grpc"): +def test_retry_job(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14614,24 +14745,22 @@ def test_create_automation(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_automation(request) + call.return_value = cloud_deploy.RetryJobResponse() + response = client.retry_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.CreateAutomationRequest() + request = cloud_deploy.RetryJobRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, cloud_deploy.RetryJobResponse) -def test_create_automation_empty_call(): +def test_retry_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -14640,19 +14769,17 @@ def test_create_automation_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_automation() + client.retry_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CreateAutomationRequest() + assert args[0] == cloud_deploy.RetryJobRequest() -def test_create_automation_non_empty_request_with_auto_populated_field(): +def test_retry_job_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -14663,30 +14790,28 @@ def test_create_automation_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.CreateAutomationRequest( - parent="parent_value", - automation_id="automation_id_value", - request_id="request_id_value", + request = cloud_deploy.RetryJobRequest( + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_automation(request=request) + client.retry_job(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CreateAutomationRequest( - parent="parent_value", - automation_id="automation_id_value", - request_id="request_id_value", + assert args[0] == cloud_deploy.RetryJobRequest( + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) -def test_create_automation_use_cached_wrapped_rpc(): +def test_retry_job_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14700,27 +14825,21 @@ def test_create_automation_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_automation in client._transport._wrapped_methods + assert client._transport.retry_job in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_automation - ] = mock_rpc + client._transport._wrapped_methods[client._transport.retry_job] = mock_rpc request = {} - client.create_automation(request) + client.retry_job(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_automation(request) + client.retry_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14728,7 +14847,7 @@ def test_create_automation_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_automation_empty_call_async(): +async def test_retry_job_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -14737,23 +14856,19 @@ async def test_create_automation_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.RetryJobResponse() ) - response = await client.create_automation() + response = await client.retry_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CreateAutomationRequest() + assert args[0] == cloud_deploy.RetryJobRequest() @pytest.mark.asyncio -async def test_create_automation_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): +async def test_retry_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -14768,27 +14883,23 @@ async def test_create_automation_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_automation + client._client._transport.retry_job in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.create_automation + client._client._transport.retry_job ] = mock_object request = {} - await client.create_automation(request) + await client.retry_job(request) # Establish that the underlying gRPC stub method was called. 
assert mock_object.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_automation(request) + await client.retry_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14796,8 +14907,8 @@ async def test_create_automation_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_automation_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.CreateAutomationRequest +async def test_retry_job_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.RetryJobRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14809,47 +14920,43 @@ async def test_create_automation_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.RetryJobResponse() ) - response = await client.create_automation(request) + response = await client.retry_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.CreateAutomationRequest() + request = cloud_deploy.RetryJobRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, cloud_deploy.RetryJobResponse) @pytest.mark.asyncio -async def test_create_automation_async_from_dict(): - await test_create_automation_async(request_type=dict) +async def test_retry_job_async_from_dict(): + await test_retry_job_async(request_type=dict) -def test_create_automation_field_headers(): +def test_retry_job_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.CreateAutomationRequest() + request = cloud_deploy.RetryJobRequest() - request.parent = "parent_value" + request.rollout = "rollout_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_automation(request) + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: + call.return_value = cloud_deploy.RetryJobResponse() + client.retry_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -14860,30 +14967,28 @@ def test_create_automation_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "rollout=rollout_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_automation_field_headers_async(): +async def test_retry_job_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloud_deploy.CreateAutomationRequest() + request = cloud_deploy.RetryJobRequest() - request.parent = "parent_value" + request.rollout = "rollout_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + cloud_deploy.RetryJobResponse() ) - await client.create_automation(request) + await client.retry_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -14894,45 +14999,43 @@ async def test_create_automation_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "rollout=rollout_value", ) in kw["metadata"] -def test_create_automation_flattened(): +def test_retry_job_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = cloud_deploy.RetryJobResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_automation( - parent="parent_value", - automation=cloud_deploy.Automation(name="name_value"), - automation_id="automation_id_value", + client.retry_job( + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].rollout + mock_val = "rollout_value" assert arg == mock_val - arg = args[0].automation - mock_val = cloud_deploy.Automation(name="name_value") + arg = args[0].phase_id + mock_val = "phase_id_value" assert arg == mock_val - arg = args[0].automation_id - mock_val = "automation_id_value" + arg = args[0].job_id + mock_val = "job_id_value" assert arg == mock_val -def test_create_automation_flattened_error(): +def test_retry_job_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14940,55 +15043,53 @@ def test_create_automation_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_automation( - cloud_deploy.CreateAutomationRequest(), - parent="parent_value", - automation=cloud_deploy.Automation(name="name_value"), - automation_id="automation_id_value", + client.retry_job( + cloud_deploy.RetryJobRequest(), + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) @pytest.mark.asyncio -async def test_create_automation_flattened_async(): +async def test_retry_job_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_job), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = cloud_deploy.RetryJobResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.RetryJobResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_automation( - parent="parent_value", - automation=cloud_deploy.Automation(name="name_value"), - automation_id="automation_id_value", + response = await client.retry_job( + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].rollout + mock_val = "rollout_value" assert arg == mock_val - arg = args[0].automation - mock_val = cloud_deploy.Automation(name="name_value") + arg = args[0].phase_id + mock_val = "phase_id_value" assert arg == mock_val - arg = args[0].automation_id - mock_val = "automation_id_value" + arg = args[0].job_id + mock_val = "job_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_automation_flattened_error_async(): +async def test_retry_job_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14996,22 +15097,22 @@ async def test_create_automation_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_automation( - cloud_deploy.CreateAutomationRequest(), - parent="parent_value", - automation=cloud_deploy.Automation(name="name_value"), - automation_id="automation_id_value", + await client.retry_job( + cloud_deploy.RetryJobRequest(), + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.UpdateAutomationRequest, + cloud_deploy.ListJobRunsRequest, dict, ], ) -def test_update_automation(request_type, transport: str = "grpc"): +def test_list_job_runs(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15022,24 +15123,27 @@ def test_update_automation(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_automation(request) + call.return_value = cloud_deploy.ListJobRunsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_job_runs(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.UpdateAutomationRequest() + request = cloud_deploy.ListJobRunsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, pagers.ListJobRunsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_update_automation_empty_call(): +def test_list_job_runs_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -15048,19 +15152,17 @@ def test_update_automation_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_automation() + client.list_job_runs() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.UpdateAutomationRequest() + assert args[0] == cloud_deploy.ListJobRunsRequest() -def test_update_automation_non_empty_request_with_auto_populated_field(): +def test_list_job_runs_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -15071,26 +15173,30 @@ def test_update_automation_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.UpdateAutomationRequest( - request_id="request_id_value", + request = cloud_deploy.ListJobRunsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_automation(request=request) + client.list_job_runs(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.UpdateAutomationRequest( - request_id="request_id_value", + assert args[0] == cloud_deploy.ListJobRunsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) -def test_update_automation_use_cached_wrapped_rpc(): +def test_list_job_runs_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15104,27 +15210,21 @@ def test_update_automation_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_automation in client._transport._wrapped_methods + assert client._transport.list_job_runs in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_automation - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_job_runs] = mock_rpc request = {} - client.update_automation(request) + client.list_job_runs(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_automation(request) + client.list_job_runs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15132,7 +15232,7 @@ def test_update_automation_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_automation_empty_call_async(): +async def test_list_job_runs_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -15141,21 +15241,22 @@ async def test_update_automation_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.ListJobRunsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) ) - response = await client.update_automation() + response = await client.list_job_runs() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.UpdateAutomationRequest() + assert args[0] == cloud_deploy.ListJobRunsRequest() @pytest.mark.asyncio -async def test_update_automation_async_use_cached_wrapped_rpc( +async def test_list_job_runs_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -15172,27 +15273,23 @@ async def test_update_automation_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_automation + client._client._transport.list_job_runs in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.update_automation + client._client._transport.list_job_runs ] = mock_object request = {} - await client.update_automation(request) + await client.list_job_runs(request) # Establish that the underlying gRPC stub method was called. 
assert mock_object.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_automation(request) + await client.list_job_runs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15200,8 +15297,8 @@ async def test_update_automation_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_update_automation_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.UpdateAutomationRequest +async def test_list_job_runs_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.ListJobRunsRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15213,47 +15310,48 @@ async def test_update_automation_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.ListJobRunsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) ) - response = await client.update_automation(request) + response = await client.list_job_runs(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.UpdateAutomationRequest() + request = cloud_deploy.ListJobRunsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, pagers.ListJobRunsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_update_automation_async_from_dict(): - await test_update_automation_async(request_type=dict) +async def test_list_job_runs_async_from_dict(): + await test_list_job_runs_async(request_type=dict) -def test_update_automation_field_headers(): +def test_list_job_runs_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.UpdateAutomationRequest() + request = cloud_deploy.ListJobRunsRequest() - request.automation.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_automation(request) + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + call.return_value = cloud_deploy.ListJobRunsResponse() + client.list_job_runs(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -15264,30 +15362,28 @@ def test_update_automation_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "automation.name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_automation_field_headers_async(): +async def test_list_job_runs_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloud_deploy.UpdateAutomationRequest() + request = cloud_deploy.ListJobRunsRequest() - request.automation.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + cloud_deploy.ListJobRunsResponse() ) - await client.update_automation(request) + await client.list_job_runs(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -15298,41 +15394,35 @@ async def test_update_automation_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "automation.name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_update_automation_flattened(): +def test_list_job_runs_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = cloud_deploy.ListJobRunsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_automation( - automation=cloud_deploy.Automation(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_job_runs( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].automation - mock_val = cloud_deploy.Automation(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_update_automation_flattened_error(): +def test_list_job_runs_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15340,50 +15430,43 @@ def test_update_automation_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_automation( - cloud_deploy.UpdateAutomationRequest(), - automation=cloud_deploy.Automation(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_job_runs( + cloud_deploy.ListJobRunsRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_update_automation_flattened_async(): +async def test_list_job_runs_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = cloud_deploy.ListJobRunsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.ListJobRunsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.update_automation( - automation=cloud_deploy.Automation(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.list_job_runs( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].automation - mock_val = cloud_deploy.Automation(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_automation_flattened_error_async(): +async def test_list_job_runs_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15391,21 +15474,210 @@ async def test_update_automation_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_automation( - cloud_deploy.UpdateAutomationRequest(), - automation=cloud_deploy.Automation(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.list_job_runs( + cloud_deploy.ListJobRunsRequest(), + parent="parent_value", + ) + + +def test_list_job_runs_pager(transport_name: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + next_page_token="abc", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[], + next_page_token="def", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_job_runs(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.JobRun) for i in results) + + +def test_list_job_runs_pages(transport_name: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_job_runs), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + next_page_token="abc", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[], + next_page_token="def", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + ), + RuntimeError, + ) + pages = list(client.list_job_runs(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_job_runs_async_pager(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_runs), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + next_page_token="abc", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[], + next_page_token="def", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_job_runs( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, cloud_deploy.JobRun) for i in responses) + + +@pytest.mark.asyncio +async def test_list_job_runs_async_pages(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_runs), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + next_page_token="abc", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[], + next_page_token="def", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + ), + RuntimeError, ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_job_runs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - cloud_deploy.DeleteAutomationRequest, + cloud_deploy.GetJobRunRequest, dict, ], ) -def test_delete_automation(request_type, transport: str = "grpc"): +def test_get_job_run(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15416,24 +15688,35 @@ def test_delete_automation(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_automation(request) + call.return_value = cloud_deploy.JobRun( + name="name_value", + uid="uid_value", + phase_id="phase_id_value", + job_id="job_id_value", + state=cloud_deploy.JobRun.State.IN_PROGRESS, + etag="etag_value", + ) + response = client.get_job_run(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.DeleteAutomationRequest() + request = cloud_deploy.GetJobRunRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, cloud_deploy.JobRun) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.phase_id == "phase_id_value" + assert response.job_id == "job_id_value" + assert response.state == cloud_deploy.JobRun.State.IN_PROGRESS + assert response.etag == "etag_value" -def test_delete_automation_empty_call(): +def test_get_job_run_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -15442,19 +15725,17 @@ def test_delete_automation_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.delete_automation() + client.get_job_run() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.DeleteAutomationRequest() + assert args[0] == cloud_deploy.GetJobRunRequest() -def test_delete_automation_non_empty_request_with_auto_populated_field(): +def test_get_job_run_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -15465,30 +15746,24 @@ def test_delete_automation_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.DeleteAutomationRequest( + request = cloud_deploy.GetJobRunRequest( name="name_value", - request_id="request_id_value", - etag="etag_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.delete_automation(request=request) + client.get_job_run(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.DeleteAutomationRequest( + assert args[0] == cloud_deploy.GetJobRunRequest( name="name_value", - request_id="request_id_value", - etag="etag_value", ) -def test_delete_automation_use_cached_wrapped_rpc(): +def test_get_job_run_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15502,27 +15777,21 @@ def test_delete_automation_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_automation in client._transport._wrapped_methods + assert client._transport.get_job_run in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_automation - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_job_run] = mock_rpc request = {} - client.delete_automation(request) + client.get_job_run(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_automation(request) + client.get_job_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15530,7 +15799,7 @@ def test_delete_automation_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_automation_empty_call_async(): +async def test_get_job_run_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -15539,21 +15808,26 @@ async def test_delete_automation_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.JobRun( + name="name_value", + uid="uid_value", + phase_id="phase_id_value", + job_id="job_id_value", + state=cloud_deploy.JobRun.State.IN_PROGRESS, + etag="etag_value", + ) ) - response = await client.delete_automation() + response = await client.get_job_run() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.DeleteAutomationRequest() + assert args[0] == cloud_deploy.GetJobRunRequest() @pytest.mark.asyncio -async def test_delete_automation_async_use_cached_wrapped_rpc( +async def test_get_job_run_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -15570,27 +15844,23 @@ async def test_delete_automation_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_automation + client._client._transport.get_job_run in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.delete_automation + client._client._transport.get_job_run ] = mock_object request = {} - await client.delete_automation(request) + await client.get_job_run(request) # Establish that the underlying gRPC stub method was called. 
assert mock_object.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_automation(request) + await client.get_job_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15598,8 +15868,8 @@ async def test_delete_automation_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_delete_automation_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.DeleteAutomationRequest +async def test_get_job_run_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.GetJobRunRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15611,47 +15881,56 @@ async def test_delete_automation_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + cloud_deploy.JobRun( + name="name_value", + uid="uid_value", + phase_id="phase_id_value", + job_id="job_id_value", + state=cloud_deploy.JobRun.State.IN_PROGRESS, + etag="etag_value", + ) ) - response = await client.delete_automation(request) + response = await client.get_job_run(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.DeleteAutomationRequest() + request = cloud_deploy.GetJobRunRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, cloud_deploy.JobRun) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.phase_id == "phase_id_value" + assert response.job_id == "job_id_value" + assert response.state == cloud_deploy.JobRun.State.IN_PROGRESS + assert response.etag == "etag_value" @pytest.mark.asyncio -async def test_delete_automation_async_from_dict(): - await test_delete_automation_async(request_type=dict) +async def test_get_job_run_async_from_dict(): + await test_get_job_run_async(request_type=dict) -def test_delete_automation_field_headers(): +def test_get_job_run_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.DeleteAutomationRequest() + request = cloud_deploy.GetJobRunRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_automation(request) + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + call.return_value = cloud_deploy.JobRun() + client.get_job_run(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -15667,25 +15946,21 @@ def test_delete_automation_field_headers(): @pytest.mark.asyncio -async def test_delete_automation_field_headers_async(): +async def test_get_job_run_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloud_deploy.DeleteAutomationRequest() + request = cloud_deploy.GetJobRunRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.delete_automation(request) + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.JobRun()) + await client.get_job_run(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -15700,20 +15975,18 @@ async def test_delete_automation_field_headers_async(): ) in kw["metadata"] -def test_delete_automation_flattened(): +def test_get_job_run_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = cloud_deploy.JobRun() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_automation( + client.get_job_run( name="name_value", ) @@ -15726,7 +15999,7 @@ def test_delete_automation_flattened(): assert arg == mock_val -def test_delete_automation_flattened_error(): +def test_get_job_run_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15734,31 +16007,27 @@ def test_delete_automation_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_automation( - cloud_deploy.DeleteAutomationRequest(), + client.get_job_run( + cloud_deploy.GetJobRunRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_delete_automation_flattened_async(): +async def test_get_job_run_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_automation), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_job_run), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = cloud_deploy.JobRun() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.JobRun()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_automation( + response = await client.get_job_run( name="name_value", ) @@ -15772,7 +16041,7 @@ async def test_delete_automation_flattened_async(): @pytest.mark.asyncio -async def test_delete_automation_flattened_error_async(): +async def test_get_job_run_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -15780,8 +16049,8 @@ async def test_delete_automation_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.delete_automation( - cloud_deploy.DeleteAutomationRequest(), + await client.get_job_run( + cloud_deploy.GetJobRunRequest(), name="name_value", ) @@ -15789,11 +16058,11 @@ async def test_delete_automation_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetAutomationRequest, + cloud_deploy.TerminateJobRunRequest, dict, ], ) -def test_get_automation(request_type, transport: str = "grpc"): +def test_terminate_job_run(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15804,35 +16073,24 @@ def test_get_automation(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Automation( - name="name_value", - uid="uid_value", - description="description_value", - etag="etag_value", - suspended=True, - service_account="service_account_value", - ) - response = client.get_automation(request) + call.return_value = cloud_deploy.TerminateJobRunResponse() + response = client.terminate_job_run(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetAutomationRequest() + request = cloud_deploy.TerminateJobRunRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.Automation) - assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.description == "description_value" - assert response.etag == "etag_value" - assert response.suspended is True - assert response.service_account == "service_account_value" + assert isinstance(response, cloud_deploy.TerminateJobRunResponse) -def test_get_automation_empty_call(): +def test_terminate_job_run_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -15841,17 +16099,19 @@ def test_get_automation_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_automation() + client.terminate_job_run() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetAutomationRequest() + assert args[0] == cloud_deploy.TerminateJobRunRequest() -def test_get_automation_non_empty_request_with_auto_populated_field(): +def test_terminate_job_run_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -15862,24 +16122,26 @@ def test_get_automation_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = cloud_deploy.GetAutomationRequest( + request = cloud_deploy.TerminateJobRunRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_automation(request=request) + client.terminate_job_run(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetAutomationRequest( + assert args[0] == cloud_deploy.TerminateJobRunRequest( name="name_value", ) -def test_get_automation_use_cached_wrapped_rpc(): +def test_terminate_job_run_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15893,21 +16155,23 @@ def test_get_automation_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_automation in client._transport._wrapped_methods + assert client._transport.terminate_job_run in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_automation] = mock_rpc + client._transport._wrapped_methods[ + client._transport.terminate_job_run + ] = mock_rpc request = {} - client.get_automation(request) + client.terminate_job_run(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_automation(request) + client.terminate_job_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15915,7 +16179,7 @@ def test_get_automation_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_automation_empty_call_async(): +async def test_terminate_job_run_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -15924,26 +16188,21 @@ async def test_get_automation_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Automation( - name="name_value", - uid="uid_value", - description="description_value", - etag="etag_value", - suspended=True, - service_account="service_account_value", - ) + cloud_deploy.TerminateJobRunResponse() ) - response = await client.get_automation() + response = await client.terminate_job_run() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetAutomationRequest() + assert args[0] == cloud_deploy.TerminateJobRunRequest() @pytest.mark.asyncio -async def test_get_automation_async_use_cached_wrapped_rpc( +async def test_terminate_job_run_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -15960,23 +16219,23 @@ async def test_get_automation_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_automation + client._client._transport.terminate_job_run in 
client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.get_automation + client._client._transport.terminate_job_run ] = mock_object request = {} - await client.get_automation(request) + await client.terminate_job_run(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - await client.get_automation(request) + await client.terminate_job_run(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -15984,8 +16243,8 @@ async def test_get_automation_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_automation_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.GetAutomationRequest +async def test_terminate_job_run_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.TerminateJobRunRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15997,56 +16256,47 @@ async def test_get_automation_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Automation( - name="name_value", - uid="uid_value", - description="description_value", - etag="etag_value", - suspended=True, - service_account="service_account_value", - ) + cloud_deploy.TerminateJobRunResponse() ) - response = await client.get_automation(request) + response = await client.terminate_job_run(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetAutomationRequest() + request = cloud_deploy.TerminateJobRunRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.Automation) - assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.description == "description_value" - assert response.etag == "etag_value" - assert response.suspended is True - assert response.service_account == "service_account_value" + assert isinstance(response, cloud_deploy.TerminateJobRunResponse) @pytest.mark.asyncio -async def test_get_automation_async_from_dict(): - await test_get_automation_async(request_type=dict) +async def test_terminate_job_run_async_from_dict(): + await test_terminate_job_run_async(request_type=dict) -def test_get_automation_field_headers(): +def test_terminate_job_run_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.GetAutomationRequest() + request = cloud_deploy.TerminateJobRunRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_automation), "__call__") as call: - call.return_value = cloud_deploy.Automation() - client.get_automation(request) + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: + call.return_value = cloud_deploy.TerminateJobRunResponse() + client.terminate_job_run(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -16062,23 +16312,25 @@ def test_get_automation_field_headers(): @pytest.mark.asyncio -async def test_get_automation_field_headers_async(): +async def test_terminate_job_run_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.GetAutomationRequest() + request = cloud_deploy.TerminateJobRunRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Automation() + cloud_deploy.TerminateJobRunResponse() ) - await client.get_automation(request) + await client.terminate_job_run(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -16093,18 +16345,20 @@ async def test_get_automation_field_headers_async(): ) in kw["metadata"] -def test_get_automation_flattened(): +def test_terminate_job_run_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Automation() + call.return_value = cloud_deploy.TerminateJobRunResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.get_automation( + client.terminate_job_run( name="name_value", ) @@ -16117,7 +16371,7 @@ def test_get_automation_flattened(): assert arg == mock_val -def test_get_automation_flattened_error(): +def test_terminate_job_run_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -16125,29 +16379,31 @@ def test_get_automation_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_automation( - cloud_deploy.GetAutomationRequest(), + client.terminate_job_run( + cloud_deploy.TerminateJobRunRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_automation_flattened_async(): +async def test_terminate_job_run_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + with mock.patch.object( + type(client.transport.terminate_job_run), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.Automation() + call.return_value = cloud_deploy.TerminateJobRunResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.Automation() + cloud_deploy.TerminateJobRunResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_automation( + response = await client.terminate_job_run( name="name_value", ) @@ -16161,7 +16417,7 @@ async def test_get_automation_flattened_async(): @pytest.mark.asyncio -async def test_get_automation_flattened_error_async(): +async def test_terminate_job_run_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -16169,8 +16425,8 @@ async def test_get_automation_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_automation( - cloud_deploy.GetAutomationRequest(), + await client.terminate_job_run( + cloud_deploy.TerminateJobRunRequest(), name="name_value", ) @@ -16178,11 +16434,11 @@ async def test_get_automation_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ListAutomationsRequest, + cloud_deploy.GetConfigRequest, dict, ], ) -def test_list_automations(request_type, transport: str = "grpc"): +def test_get_config(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16193,27 +16449,27 @@ def test_list_automations(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + with mock.patch.object(type(client.transport.get_config), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = cloud_deploy.ListAutomationsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + call.return_value = cloud_deploy.Config( + name="name_value", + default_skaffold_version="default_skaffold_version_value", ) - response = client.list_automations(request) + response = client.get_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.ListAutomationsRequest() + request = cloud_deploy.GetConfigRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAutomationsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, cloud_deploy.Config) + assert response.name == "name_value" + assert response.default_skaffold_version == "default_skaffold_version_value" -def test_list_automations_empty_call(): +def test_get_config_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -16222,17 +16478,17 @@ def test_list_automations_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + with mock.patch.object(type(client.transport.get_config), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_automations() + client.get_config() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListAutomationsRequest() + assert args[0] == cloud_deploy.GetConfigRequest() -def test_list_automations_non_empty_request_with_auto_populated_field(): +def test_get_config_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -16243,30 +16499,24 @@ def test_list_automations_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.ListAutomationsRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + request = cloud_deploy.GetConfigRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + with mock.patch.object(type(client.transport.get_config), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_automations(request=request) + client.get_config(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListAutomationsRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + assert args[0] == cloud_deploy.GetConfigRequest( + name="name_value", ) -def test_list_automations_use_cached_wrapped_rpc(): +def test_get_config_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16280,23 +16530,21 @@ def test_list_automations_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_automations in client._transport._wrapped_methods + assert client._transport.get_config in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_automations - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_config] = mock_rpc request = {} - client.list_automations(request) + client.get_config(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_automations(request) + client.get_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -16304,7 +16552,7 @@ def test_list_automations_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_automations_empty_call_async(): +async def test_get_config_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudDeployAsyncClient( @@ -16313,24 +16561,22 @@ async def test_list_automations_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + with mock.patch.object(type(client.transport.get_config), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListAutomationsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + cloud_deploy.Config( + name="name_value", + default_skaffold_version="default_skaffold_version_value", ) ) - response = await client.list_automations() + response = await client.get_config() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListAutomationsRequest() + assert args[0] == cloud_deploy.GetConfigRequest() @pytest.mark.asyncio -async def test_list_automations_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): +async def test_get_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -16345,23 +16591,23 @@ async def test_list_automations_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_automations + client._client._transport.get_config in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.list_automations + client._client._transport.get_config ] = mock_object request = {} - await client.list_automations(request) + await client.get_config(request) # Establish that the underlying gRPC stub method was called. 
assert mock_object.call_count == 1 - await client.list_automations(request) + await client.get_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -16369,8 +16615,8 @@ async def test_list_automations_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_automations_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.ListAutomationsRequest +async def test_get_config_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.GetConfigRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16382,48 +16628,48 @@ async def test_list_automations_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + with mock.patch.object(type(client.transport.get_config), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListAutomationsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + cloud_deploy.Config( + name="name_value", + default_skaffold_version="default_skaffold_version_value", ) ) - response = await client.list_automations(request) + response = await client.get_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.ListAutomationsRequest() + request = cloud_deploy.GetConfigRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAutomationsAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, cloud_deploy.Config) + assert response.name == "name_value" + assert response.default_skaffold_version == "default_skaffold_version_value" @pytest.mark.asyncio -async def test_list_automations_async_from_dict(): - await test_list_automations_async(request_type=dict) +async def test_get_config_async_from_dict(): + await test_get_config_async(request_type=dict) -def test_list_automations_field_headers(): +def test_get_config_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.ListAutomationsRequest() + request = cloud_deploy.GetConfigRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: - call.return_value = cloud_deploy.ListAutomationsResponse() - client.list_automations(request) + with mock.patch.object(type(client.transport.get_config), "__call__") as call: + call.return_value = cloud_deploy.Config() + client.get_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -16434,28 +16680,26 @@ def test_list_automations_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_automations_field_headers_async(): +async def test_get_config_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. - request = cloud_deploy.ListAutomationsRequest() + request = cloud_deploy.GetConfigRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListAutomationsResponse() - ) - await client.list_automations(request) + with mock.patch.object(type(client.transport.get_config), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.Config()) + await client.get_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -16466,35 +16710,35 @@ async def test_list_automations_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_automations_flattened(): +def test_get_config_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + with mock.patch.object(type(client.transport.get_config), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListAutomationsResponse() + call.return_value = cloud_deploy.Config() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_automations( - parent="parent_value", + client.get_config( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_automations_flattened_error(): +def test_get_config_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -16502,43 +16746,41 @@ def test_list_automations_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_automations( - cloud_deploy.ListAutomationsRequest(), - parent="parent_value", + client.get_config( + cloud_deploy.GetConfigRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_automations_flattened_async(): +async def test_get_config_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + with mock.patch.object(type(client.transport.get_config), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListAutomationsResponse() + call.return_value = cloud_deploy.Config() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListAutomationsResponse() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.Config()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_automations( - parent="parent_value", + response = await client.get_config( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_automations_flattened_error_async(): +async def test_get_config_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -16546,210 +16788,20 @@ async def test_list_automations_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_automations( - cloud_deploy.ListAutomationsRequest(), - parent="parent_value", - ) - - -def test_list_automations_pager(transport_name: str = "grpc"): - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - cloud_deploy.Automation(), - cloud_deploy.Automation(), - ], - next_page_token="abc", - ), - cloud_deploy.ListAutomationsResponse( - automations=[], - next_page_token="def", - ), - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - cloud_deploy.Automation(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_automations(request={}) - - assert pager._metadata == expected_metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloud_deploy.Automation) for i in results) - - -def test_list_automations_pages(transport_name: str = "grpc"): - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_automations), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - cloud_deploy.Automation(), - cloud_deploy.Automation(), - ], - next_page_token="abc", - ), - cloud_deploy.ListAutomationsResponse( - automations=[], - next_page_token="def", - ), - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - cloud_deploy.Automation(), - ], - ), - RuntimeError, - ) - pages = list(client.list_automations(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_automations_async_pager(): - client = CloudDeployAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_automations), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - cloud_deploy.Automation(), - cloud_deploy.Automation(), - ], - next_page_token="abc", - ), - cloud_deploy.ListAutomationsResponse( - automations=[], - next_page_token="def", - ), - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - cloud_deploy.Automation(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_automations( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, cloud_deploy.Automation) for i in responses) - - -@pytest.mark.asyncio -async def test_list_automations_async_pages(): - client = CloudDeployAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_automations), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - cloud_deploy.Automation(), - cloud_deploy.Automation(), - ], - next_page_token="abc", - ), - cloud_deploy.ListAutomationsResponse( - automations=[], - next_page_token="def", - ), - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListAutomationsResponse( - automations=[ - cloud_deploy.Automation(), - cloud_deploy.Automation(), - ], - ), - RuntimeError, + await client.get_config( + cloud_deploy.GetConfigRequest(), + name="name_value", ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_automations(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetAutomationRunRequest, + cloud_deploy.CreateAutomationRequest, dict, ], ) -def test_get_automation_run(request_type, transport: str = "grpc"): +def test_create_automation(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16761,40 +16813,23 @@ def test_get_automation_run(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = cloud_deploy.AutomationRun( - name="name_value", - etag="etag_value", - service_account="service_account_value", - target_id="target_id_value", - state=cloud_deploy.AutomationRun.State.SUCCEEDED, - state_description="state_description_value", - rule_id="rule_id_value", - automation_id="automation_id_value", - ) - response = client.get_automation_run(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetAutomationRunRequest() + request = cloud_deploy.CreateAutomationRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.AutomationRun) - assert response.name == "name_value" - assert response.etag == "etag_value" - assert response.service_account == "service_account_value" - assert response.target_id == "target_id_value" - assert response.state == cloud_deploy.AutomationRun.State.SUCCEEDED - assert response.state_description == "state_description_value" - assert response.rule_id == "rule_id_value" - assert response.automation_id == "automation_id_value" + assert isinstance(response, future.Future) -def test_get_automation_run_empty_call(): +def test_create_automation_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -16804,18 +16839,18 @@ def test_get_automation_run_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_automation_run() + client.create_automation() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetAutomationRunRequest() + assert args[0] == cloud_deploy.CreateAutomationRequest() -def test_get_automation_run_non_empty_request_with_auto_populated_field(): +def test_create_automation_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -16826,26 +16861,30 @@ def test_get_automation_run_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_deploy.GetAutomationRunRequest( - name="name_value", + request = cloud_deploy.CreateAutomationRequest( + parent="parent_value", + automation_id="automation_id_value", + request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_automation_run(request=request) + client.create_automation(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetAutomationRunRequest( - name="name_value", + assert args[0] == cloud_deploy.CreateAutomationRequest( + parent="parent_value", + automation_id="automation_id_value", + request_id="request_id_value", ) -def test_get_automation_run_use_cached_wrapped_rpc(): +def test_create_automation_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16859,9 +16898,7 @@ def test_get_automation_run_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.get_automation_run in client._transport._wrapped_methods - ) + assert client._transport.create_automation in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -16869,15 +16906,19 @@ def test_get_automation_run_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_automation_run + client._transport.create_automation ] = mock_rpc request = {} - client.get_automation_run(request) + client.create_automation(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_automation_run(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_automation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -16885,7 +16926,7 @@ def test_get_automation_run_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_automation_run_empty_call_async(): +async def test_create_automation_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -16895,29 +16936,20 @@ async def test_get_automation_run_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.AutomationRun( - name="name_value", - etag="etag_value", - service_account="service_account_value", - target_id="target_id_value", - state=cloud_deploy.AutomationRun.State.SUCCEEDED, - state_description="state_description_value", - rule_id="rule_id_value", - automation_id="automation_id_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.get_automation_run() + response = await client.create_automation() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.GetAutomationRunRequest() + assert args[0] == cloud_deploy.CreateAutomationRequest() @pytest.mark.asyncio -async def test_get_automation_run_async_use_cached_wrapped_rpc( +async def test_create_automation_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -16934,23 +16966,27 @@ async def test_get_automation_run_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_automation_run + client._client._transport.create_automation in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.get_automation_run + client._client._transport.create_automation ] = mock_object request = {} - await client.get_automation_run(request) + await client.create_automation(request) # Establish that the underlying gRPC stub method was called. 
assert mock_object.call_count == 1 - await client.get_automation_run(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_automation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -16958,8 +16994,8 @@ async def test_get_automation_run_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_automation_run_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.GetAutomationRunRequest +async def test_create_automation_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.CreateAutomationRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16972,63 +17008,46 @@ async def test_get_automation_run_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.AutomationRun( - name="name_value", - etag="etag_value", - service_account="service_account_value", - target_id="target_id_value", - state=cloud_deploy.AutomationRun.State.SUCCEEDED, - state_description="state_description_value", - rule_id="rule_id_value", - automation_id="automation_id_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.get_automation_run(request) + response = await client.create_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.GetAutomationRunRequest() + request = cloud_deploy.CreateAutomationRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.AutomationRun) - assert response.name == "name_value" - assert response.etag == "etag_value" - assert response.service_account == "service_account_value" - assert response.target_id == "target_id_value" - assert response.state == cloud_deploy.AutomationRun.State.SUCCEEDED - assert response.state_description == "state_description_value" - assert response.rule_id == "rule_id_value" - assert response.automation_id == "automation_id_value" + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_get_automation_run_async_from_dict(): - await test_get_automation_run_async(request_type=dict) +async def test_create_automation_async_from_dict(): + await test_create_automation_async(request_type=dict) -def test_get_automation_run_field_headers(): +def test_create_automation_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.GetAutomationRunRequest() + request = cloud_deploy.CreateAutomationRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: - call.return_value = cloud_deploy.AutomationRun() - client.get_automation_run(request) + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_automation(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -17039,30 +17058,30 @@ def test_get_automation_run_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_automation_run_field_headers_async(): +async def test_create_automation_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.GetAutomationRunRequest() + request = cloud_deploy.CreateAutomationRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.AutomationRun() + operations_pb2.Operation(name="operations/op") ) - await client.get_automation_run(request) + await client.create_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -17073,37 +17092,45 @@ async def test_get_automation_run_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_automation_run_flattened(): +def test_create_automation_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = cloud_deploy.AutomationRun() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_automation_run( - name="name_value", + client.create_automation( + parent="parent_value", + automation=cloud_deploy.Automation(name="name_value"), + automation_id="automation_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].automation + mock_val = cloud_deploy.Automation(name="name_value") + assert arg == mock_val + arg = args[0].automation_id + mock_val = "automation_id_value" assert arg == mock_val -def test_get_automation_run_flattened_error(): +def test_create_automation_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -17111,45 +17138,55 @@ def test_get_automation_run_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_automation_run( - cloud_deploy.GetAutomationRunRequest(), - name="name_value", + client.create_automation( + cloud_deploy.CreateAutomationRequest(), + parent="parent_value", + automation=cloud_deploy.Automation(name="name_value"), + automation_id="automation_id_value", ) @pytest.mark.asyncio -async def test_get_automation_run_flattened_async(): +async def test_create_automation_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_automation_run), "__call__" + type(client.transport.create_automation), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.AutomationRun() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.AutomationRun() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_automation_run( - name="name_value", + response = await client.create_automation( + parent="parent_value", + automation=cloud_deploy.Automation(name="name_value"), + automation_id="automation_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].automation + mock_val = cloud_deploy.Automation(name="name_value") + assert arg == mock_val + arg = args[0].automation_id + mock_val = "automation_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_automation_run_flattened_error_async(): +async def test_create_automation_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -17157,20 +17194,22 @@ async def test_get_automation_run_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_automation_run( - cloud_deploy.GetAutomationRunRequest(), - name="name_value", + await client.create_automation( + cloud_deploy.CreateAutomationRequest(), + parent="parent_value", + automation=cloud_deploy.Automation(name="name_value"), + automation_id="automation_id_value", ) @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ListAutomationRunsRequest, + cloud_deploy.UpdateAutomationRequest, dict, ], ) -def test_list_automation_runs(request_type, transport: str = "grpc"): +def test_update_automation(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17182,28 +17221,23 @@ def test_list_automation_runs(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListAutomationRunsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - response = client.list_automation_runs(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = cloud_deploy.ListAutomationRunsRequest() + request = cloud_deploy.UpdateAutomationRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAutomationRunsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, future.Future) -def test_list_automation_runs_empty_call(): +def test_update_automation_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( @@ -17213,18 +17247,18 @@ def test_list_automation_runs_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_automation_runs() + client.update_automation() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListAutomationRunsRequest() + assert args[0] == cloud_deploy.UpdateAutomationRequest() -def test_list_automation_runs_non_empty_request_with_auto_populated_field(): +def test_update_automation_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = CloudDeployClient( @@ -17235,32 +17269,26 @@ def test_list_automation_runs_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = cloud_deploy.ListAutomationRunsRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + request = cloud_deploy.UpdateAutomationRequest( + request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_automation_runs(request=request) + client.update_automation(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListAutomationRunsRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + assert args[0] == cloud_deploy.UpdateAutomationRequest( + request_id="request_id_value", ) -def test_list_automation_runs_use_cached_wrapped_rpc(): +def test_update_automation_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17274,9 +17302,7 @@ def test_list_automation_runs_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_automation_runs in client._transport._wrapped_methods - ) + assert client._transport.update_automation in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -17284,15 +17310,19 @@ def test_list_automation_runs_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.list_automation_runs + client._transport.update_automation ] = mock_rpc request = {} - client.list_automation_runs(request) + client.update_automation(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_automation_runs(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_automation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -17300,7 +17330,7 @@ def test_list_automation_runs_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_automation_runs_empty_call_async(): +async def test_update_automation_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -17310,23 +17340,20 @@ async def test_list_automation_runs_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListAutomationRunsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.list_automation_runs() + response = await client.update_automation() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.ListAutomationRunsRequest() + assert args[0] == cloud_deploy.UpdateAutomationRequest() @pytest.mark.asyncio -async def test_list_automation_runs_async_use_cached_wrapped_rpc( +async def test_update_automation_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -17343,23 +17370,27 @@ async def test_list_automation_runs_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_automation_runs + client._client._transport.update_automation in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.list_automation_runs + client._client._transport.update_automation ] = mock_object request = {} - await client.list_automation_runs(request) + await client.update_automation(request) # Establish that the underlying gRPC stub method was called. 
assert mock_object.call_count == 1 - await client.list_automation_runs(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_automation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -17367,8 +17398,8 @@ async def test_list_automation_runs_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_automation_runs_async( - transport: str = "grpc_asyncio", request_type=cloud_deploy.ListAutomationRunsRequest +async def test_update_automation_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.UpdateAutomationRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17381,51 +17412,46 @@ async def test_list_automation_runs_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListAutomationRunsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.list_automation_runs(request) + response = await client.update_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.ListAutomationRunsRequest() + request = cloud_deploy.UpdateAutomationRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAutomationRunsAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_list_automation_runs_async_from_dict(): - await test_list_automation_runs_async(request_type=dict) +async def test_update_automation_async_from_dict(): + await test_update_automation_async(request_type=dict) -def test_list_automation_runs_field_headers(): +def test_update_automation_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.ListAutomationRunsRequest() + request = cloud_deploy.UpdateAutomationRequest() - request.parent = "parent_value" + request.automation.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: - call.return_value = cloud_deploy.ListAutomationRunsResponse() - client.list_automation_runs(request) + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -17436,30 +17462,30 @@ def test_list_automation_runs_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "automation.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_automation_runs_field_headers_async(): +async def test_update_automation_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. - request = cloud_deploy.ListAutomationRunsRequest() + request = cloud_deploy.UpdateAutomationRequest() - request.parent = "parent_value" + request.automation.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListAutomationRunsResponse() + operations_pb2.Operation(name="operations/op") ) - await client.list_automation_runs(request) + await client.update_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -17470,37 +17496,41 @@ async def test_list_automation_runs_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "automation.name=name_value", ) in kw["metadata"] -def test_list_automation_runs_flattened(): +def test_update_automation_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListAutomationRunsResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_automation_runs( - parent="parent_value", + client.update_automation( + automation=cloud_deploy.Automation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].automation + mock_val = cloud_deploy.Automation(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_list_automation_runs_flattened_error(): +def test_update_automation_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -17508,45 +17538,50 @@ def test_list_automation_runs_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_automation_runs( - cloud_deploy.ListAutomationRunsRequest(), - parent="parent_value", + client.update_automation( + cloud_deploy.UpdateAutomationRequest(), + automation=cloud_deploy.Automation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_list_automation_runs_flattened_async(): +async def test_update_automation_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.update_automation), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.ListAutomationRunsResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.ListAutomationRunsResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.list_automation_runs( - parent="parent_value", + response = await client.update_automation( + automation=cloud_deploy.Automation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].automation + mock_val = cloud_deploy.Automation(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_list_automation_runs_flattened_error_async(): +async def test_update_automation_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -17554,297 +17589,104 @@ async def test_list_automation_runs_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_automation_runs( - cloud_deploy.ListAutomationRunsRequest(), - parent="parent_value", + await client.update_automation( + cloud_deploy.UpdateAutomationRequest(), + automation=cloud_deploy.Automation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_list_automation_runs_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.DeleteAutomationRequest, + dict, + ], +) +def test_delete_automation(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - ], - next_page_token="abc", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[], - next_page_token="def", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_automation_runs(request={}) + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_automation(request) - assert pager._metadata == expected_metadata + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloud_deploy.DeleteAutomationRequest() + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloud_deploy.AutomationRun) for i in results) + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -def test_list_automation_runs_pages(transport_name: str = "grpc"): +def test_delete_automation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - ], - next_page_token="abc", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[], - next_page_token="def", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - ], - ), - RuntimeError, + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - pages = list(client.list_automation_runs(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + client.delete_automation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.DeleteAutomationRequest() -@pytest.mark.asyncio -async def test_list_automation_runs_async_pager(): - client = CloudDeployAsyncClient( +def test_delete_automation_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_deploy.DeleteAutomationRequest( + name="name_value", + request_id="request_id_value", + etag="etag_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_automation_runs), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - ], - next_page_token="abc", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[], - next_page_token="def", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_automation_runs( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, cloud_deploy.AutomationRun) for i in responses) - - -@pytest.mark.asyncio -async def test_list_automation_runs_async_pages(): - client = CloudDeployAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_automation_runs), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - ], - next_page_token="abc", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[], - next_page_token="def", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListAutomationRunsResponse( - automation_runs=[ - cloud_deploy.AutomationRun(), - cloud_deploy.AutomationRun(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_automation_runs(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_deploy.CancelAutomationRunRequest, - dict, - ], -) -def test_cancel_automation_run(request_type, transport: str = "grpc"): - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.CancelAutomationRunResponse() - response = client.cancel_automation_run(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = cloud_deploy.CancelAutomationRunRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.CancelAutomationRunResponse) - - -def test_cancel_automation_run_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.cancel_automation_run() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CancelAutomationRunRequest() - - -def test_cancel_automation_run_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = cloud_deploy.CancelAutomationRunRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.cancel_automation_run(request=request) + client.delete_automation(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CancelAutomationRunRequest( + assert args[0] == cloud_deploy.DeleteAutomationRequest( name="name_value", + request_id="request_id_value", + etag="etag_value", ) -def test_cancel_automation_run_use_cached_wrapped_rpc(): +def test_delete_automation_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17858,10 +17700,7 @@ def test_cancel_automation_run_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.cancel_automation_run - in client._transport._wrapped_methods - ) + assert client._transport.delete_automation in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -17869,15 +17708,19 @@ def test_cancel_automation_run_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.cancel_automation_run + client._transport.delete_automation ] = mock_rpc request = {} - client.cancel_automation_run(request) + client.delete_automation(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.cancel_automation_run(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_automation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -17885,7 +17728,7 @@ def test_cancel_automation_run_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_cancel_automation_run_empty_call_async(): +async def test_delete_automation_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployAsyncClient( @@ -17895,20 +17738,20 @@ async def test_cancel_automation_run_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.CancelAutomationRunResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.cancel_automation_run() + response = await client.delete_automation() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_deploy.CancelAutomationRunRequest() + assert args[0] == cloud_deploy.DeleteAutomationRequest() @pytest.mark.asyncio -async def test_cancel_automation_run_async_use_cached_wrapped_rpc( +async def test_delete_automation_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -17925,23 +17768,27 @@ async def test_cancel_automation_run_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.cancel_automation_run + client._client._transport.delete_automation in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.cancel_automation_run + client._client._transport.delete_automation ] = mock_object request = {} - await client.cancel_automation_run(request) + await client.delete_automation(request) # Establish that the underlying gRPC stub method was called. 
assert mock_object.call_count == 1 - await client.cancel_automation_run(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_automation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -17949,9 +17796,8 @@ async def test_cancel_automation_run_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_cancel_automation_run_async( - transport: str = "grpc_asyncio", - request_type=cloud_deploy.CancelAutomationRunRequest, +async def test_delete_automation_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.DeleteAutomationRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17964,46 +17810,46 @@ async def test_cancel_automation_run_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.CancelAutomationRunResponse() + operations_pb2.Operation(name="operations/spam") ) - response = await client.cancel_automation_run(request) + response = await client.delete_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = cloud_deploy.CancelAutomationRunRequest() + request = cloud_deploy.DeleteAutomationRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.CancelAutomationRunResponse) + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_cancel_automation_run_async_from_dict(): - await test_cancel_automation_run_async(request_type=dict) +async def test_delete_automation_async_from_dict(): + await test_delete_automation_async(request_type=dict) -def test_cancel_automation_run_field_headers(): +def test_delete_automation_field_headers(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.CancelAutomationRunRequest() + request = cloud_deploy.DeleteAutomationRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: - call.return_value = cloud_deploy.CancelAutomationRunResponse() - client.cancel_automation_run(request) + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -18019,25 +17865,25 @@ def test_cancel_automation_run_field_headers(): @pytest.mark.asyncio -async def test_cancel_automation_run_field_headers_async(): +async def test_delete_automation_field_headers_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloud_deploy.CancelAutomationRunRequest() + request = cloud_deploy.DeleteAutomationRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.CancelAutomationRunResponse() + operations_pb2.Operation(name="operations/op") ) - await client.cancel_automation_run(request) + await client.delete_automation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -18052,20 +17898,20 @@ async def test_cancel_automation_run_field_headers_async(): ) in kw["metadata"] -def test_cancel_automation_run_flattened(): +def test_delete_automation_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.CancelAutomationRunResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.cancel_automation_run( + client.delete_automation( name="name_value", ) @@ -18078,7 +17924,7 @@ def test_cancel_automation_run_flattened(): assert arg == mock_val -def test_cancel_automation_run_flattened_error(): +def test_delete_automation_flattened_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -18086,31 +17932,31 @@ def test_cancel_automation_run_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.cancel_automation_run( - cloud_deploy.CancelAutomationRunRequest(), + client.delete_automation( + cloud_deploy.DeleteAutomationRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_cancel_automation_run_flattened_async(): +async def test_delete_automation_flattened_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.cancel_automation_run), "__call__" + type(client.transport.delete_automation), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = cloud_deploy.CancelAutomationRunResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_deploy.CancelAutomationRunResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.cancel_automation_run( + response = await client.delete_automation( name="name_value", ) @@ -18124,7 +17970,7 @@ async def test_cancel_automation_run_flattened_async(): @pytest.mark.asyncio -async def test_cancel_automation_run_flattened_error_async(): +async def test_delete_automation_flattened_error_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -18132,8 +17978,8 @@ async def test_cancel_automation_run_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.cancel_automation_run( - cloud_deploy.CancelAutomationRunRequest(), + await client.delete_automation( + cloud_deploy.DeleteAutomationRequest(), name="name_value", ) @@ -18141,52 +17987,103 @@ async def test_cancel_automation_run_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ListDeliveryPipelinesRequest, + cloud_deploy.GetAutomationRequest, dict, ], ) -def test_list_delivery_pipelines_rest(request_type): +def test_get_automation(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListDeliveryPipelinesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = cloud_deploy.Automation( + name="name_value", + uid="uid_value", + description="description_value", + etag="etag_value", + suspended=True, + service_account="service_account_value", ) + response = client.get_automation(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.ListDeliveryPipelinesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_delivery_pipelines(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloud_deploy.GetAutomationRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDeliveryPipelinesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, cloud_deploy.Automation) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.etag == "etag_value" + assert response.suspended is True + assert response.service_account == "service_account_value" -def test_list_delivery_pipelines_rest_use_cached_wrapped_rpc(): +def test_get_automation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_automation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.GetAutomationRequest() + + +def test_get_automation_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_deploy.GetAutomationRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_automation(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.GetAutomationRequest( + name="name_value", + ) + + +def test_get_automation_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -18194,137 +18091,3830 @@ def test_list_delivery_pipelines_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_delivery_pipelines - in client._transport._wrapped_methods - ) + assert client._transport.get_automation in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_delivery_pipelines - ] = mock_rpc - + client._transport._wrapped_methods[client._transport.get_automation] = mock_rpc request = {} - client.list_delivery_pipelines(request) + client.get_automation(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_delivery_pipelines(request) + client.get_automation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_delivery_pipelines_rest_required_fields( - request_type=cloud_deploy.ListDeliveryPipelinesRequest, -): - transport_class = transports.CloudDeployRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +@pytest.mark.asyncio +async def test_get_automation_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) - # verify fields with default values are dropped + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.Automation( + name="name_value", + uid="uid_value", + description="description_value", + etag="etag_value", + suspended=True, + service_account="service_account_value", + ) + ) + response = await client.get_automation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.GetAutomationRequest() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_delivery_pipelines._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - # verify required fields with default values are now present +@pytest.mark.asyncio +async def test_get_automation_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - jsonified_request["parent"] = "parent_value" + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_delivery_pipelines._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", + # Ensure method has been cached + assert ( + client._client._transport.get_automation + in client._client._transport._wrapped_methods ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_automation + ] = mock_object - # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListDeliveryPipelinesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + request = {} + await client.get_automation(request) - response_value = Response() - response_value.status_code = 200 + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 - # Convert return value to protobuf type - return_value = cloud_deploy.ListDeliveryPipelinesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + await client.get_automation(request) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 - response = client.list_delivery_pipelines(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] +@pytest.mark.asyncio +async def test_get_automation_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.GetAutomationRequest +): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.Automation( + name="name_value", + uid="uid_value", + description="description_value", + etag="etag_value", + suspended=True, + service_account="service_account_value", + ) + ) + response = await client.get_automation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cloud_deploy.GetAutomationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloud_deploy.Automation) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.etag == "etag_value" + assert response.suspended is True + assert response.service_account == "service_account_value" + + +@pytest.mark.asyncio +async def test_get_automation_async_from_dict(): + await test_get_automation_async(request_type=dict) + + +def test_get_automation_field_headers(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.GetAutomationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + call.return_value = cloud_deploy.Automation() + client.get_automation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_automation_field_headers_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.GetAutomationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.Automation() + ) + await client.get_automation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_automation_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.Automation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_automation( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_automation_flattened_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_automation( + cloud_deploy.GetAutomationRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_automation_flattened_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_automation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.Automation() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.Automation() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_automation( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_automation_flattened_error_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_automation( + cloud_deploy.GetAutomationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.ListAutomationsRequest, + dict, + ], +) +def test_list_automations(request_type, transport: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = cloud_deploy.ListAutomationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_automations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloud_deploy.ListAutomationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAutomationsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_automations_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_automations() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.ListAutomationsRequest() + + +def test_list_automations_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = cloud_deploy.ListAutomationsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_automations(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.ListAutomationsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_automations_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_automations in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_automations + ] = mock_rpc + request = {} + client.list_automations(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_automations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_automations_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.ListAutomationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_automations() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.ListAutomationsRequest() + + +@pytest.mark.asyncio +async def test_list_automations_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_automations + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_automations + ] = 
mock_object + + request = {} + await client.list_automations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_automations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_automations_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.ListAutomationsRequest +): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.ListAutomationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_automations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cloud_deploy.ListAutomationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAutomationsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_automations_async_from_dict(): + await test_list_automations_async(request_type=dict) + + +def test_list_automations_field_headers(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.ListAutomationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + call.return_value = cloud_deploy.ListAutomationsResponse() + client.list_automations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_automations_field_headers_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.ListAutomationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.ListAutomationsResponse() + ) + await client.list_automations(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_automations_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.ListAutomationsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_automations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_automations_flattened_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_automations( + cloud_deploy.ListAutomationsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_automations_flattened_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = cloud_deploy.ListAutomationsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.ListAutomationsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_automations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_automations_flattened_error_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_automations( + cloud_deploy.ListAutomationsRequest(), + parent="parent_value", + ) + + +def test_list_automations_pager(transport_name: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + cloud_deploy.Automation(), + cloud_deploy.Automation(), + ], + next_page_token="abc", + ), + cloud_deploy.ListAutomationsResponse( + automations=[], + next_page_token="def", + ), + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + cloud_deploy.Automation(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_automations(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.Automation) for i in results) + + +def test_list_automations_pages(transport_name: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_automations), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + cloud_deploy.Automation(), + cloud_deploy.Automation(), + ], + next_page_token="abc", + ), + cloud_deploy.ListAutomationsResponse( + automations=[], + next_page_token="def", + ), + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + cloud_deploy.Automation(), + ], + ), + RuntimeError, + ) + pages = list(client.list_automations(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_automations_async_pager(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automations), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + cloud_deploy.Automation(), + cloud_deploy.Automation(), + ], + next_page_token="abc", + ), + cloud_deploy.ListAutomationsResponse( + automations=[], + next_page_token="def", + ), + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + cloud_deploy.Automation(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_automations( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, cloud_deploy.Automation) for i in responses) + + +@pytest.mark.asyncio +async def test_list_automations_async_pages(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automations), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + cloud_deploy.Automation(), + cloud_deploy.Automation(), + ], + next_page_token="abc", + ), + cloud_deploy.ListAutomationsResponse( + automations=[], + next_page_token="def", + ), + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListAutomationsResponse( + automations=[ + cloud_deploy.Automation(), + cloud_deploy.Automation(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_automations(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.GetAutomationRunRequest, + dict, + ], +) +def test_get_automation_run(request_type, transport: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = cloud_deploy.AutomationRun( + name="name_value", + etag="etag_value", + service_account="service_account_value", + target_id="target_id_value", + state=cloud_deploy.AutomationRun.State.SUCCEEDED, + state_description="state_description_value", + rule_id="rule_id_value", + automation_id="automation_id_value", + ) + response = client.get_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloud_deploy.GetAutomationRunRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_deploy.AutomationRun) + assert response.name == "name_value" + assert response.etag == "etag_value" + assert response.service_account == "service_account_value" + assert response.target_id == "target_id_value" + assert response.state == cloud_deploy.AutomationRun.State.SUCCEEDED + assert response.state_description == "state_description_value" + assert response.rule_id == "rule_id_value" + assert response.automation_id == "automation_id_value" + + +def test_get_automation_run_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_automation_run() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.GetAutomationRunRequest() + + +def test_get_automation_run_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_deploy.GetAutomationRunRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_automation_run(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.GetAutomationRunRequest( + name="name_value", + ) + + +def test_get_automation_run_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_automation_run in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.get_automation_run + ] = mock_rpc + request = {} + client.get_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_automation_run(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_automation_run_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.AutomationRun( + name="name_value", + etag="etag_value", + service_account="service_account_value", + target_id="target_id_value", + state=cloud_deploy.AutomationRun.State.SUCCEEDED, + state_description="state_description_value", + rule_id="rule_id_value", + automation_id="automation_id_value", + ) + ) + response = await client.get_automation_run() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.GetAutomationRunRequest() + + +@pytest.mark.asyncio +async def test_get_automation_run_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap 
all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_automation_run + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_automation_run + ] = mock_object + + request = {} + await client.get_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_automation_run(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_automation_run_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.GetAutomationRunRequest +): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.AutomationRun( + name="name_value", + etag="etag_value", + service_account="service_account_value", + target_id="target_id_value", + state=cloud_deploy.AutomationRun.State.SUCCEEDED, + state_description="state_description_value", + rule_id="rule_id_value", + automation_id="automation_id_value", + ) + ) + response = await client.get_automation_run(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cloud_deploy.GetAutomationRunRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_deploy.AutomationRun) + assert response.name == "name_value" + assert response.etag == "etag_value" + assert response.service_account == "service_account_value" + assert response.target_id == "target_id_value" + assert response.state == cloud_deploy.AutomationRun.State.SUCCEEDED + assert response.state_description == "state_description_value" + assert response.rule_id == "rule_id_value" + assert response.automation_id == "automation_id_value" + + +@pytest.mark.asyncio +async def test_get_automation_run_async_from_dict(): + await test_get_automation_run_async(request_type=dict) + + +def test_get_automation_run_field_headers(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.GetAutomationRunRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + call.return_value = cloud_deploy.AutomationRun() + client.get_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_automation_run_field_headers_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.GetAutomationRunRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.AutomationRun() + ) + await client.get_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_automation_run_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.AutomationRun() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_automation_run( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_automation_run_flattened_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_automation_run( + cloud_deploy.GetAutomationRunRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_automation_run_flattened_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.AutomationRun() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.AutomationRun() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_automation_run( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_automation_run_flattened_error_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_automation_run( + cloud_deploy.GetAutomationRunRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.ListAutomationRunsRequest, + dict, + ], +) +def test_list_automation_runs(request_type, transport: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.ListAutomationRunsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_automation_runs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloud_deploy.ListAutomationRunsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAutomationRunsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_automation_runs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_automation_runs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.ListAutomationRunsRequest() + + +def test_list_automation_runs_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_deploy.ListAutomationRunsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_automation_runs(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.ListAutomationRunsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_automation_runs_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_automation_runs in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_automation_runs + ] = mock_rpc + request = {} + client.list_automation_runs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_automation_runs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_automation_runs_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.ListAutomationRunsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_automation_runs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.ListAutomationRunsRequest() + + +@pytest.mark.asyncio +async def test_list_automation_runs_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_automation_runs + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_automation_runs + ] = mock_object + + request = {} + await client.list_automation_runs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_automation_runs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_automation_runs_async( + transport: str = "grpc_asyncio", request_type=cloud_deploy.ListAutomationRunsRequest +): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.ListAutomationRunsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_automation_runs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cloud_deploy.ListAutomationRunsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAutomationRunsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_automation_runs_async_from_dict(): + await test_list_automation_runs_async(request_type=dict) + + +def test_list_automation_runs_field_headers(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = cloud_deploy.ListAutomationRunsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + call.return_value = cloud_deploy.ListAutomationRunsResponse() + client.list_automation_runs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_automation_runs_field_headers_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.ListAutomationRunsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.ListAutomationRunsResponse() + ) + await client.list_automation_runs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_automation_runs_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.ListAutomationRunsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_automation_runs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_automation_runs_flattened_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_automation_runs( + cloud_deploy.ListAutomationRunsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_automation_runs_flattened_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.ListAutomationRunsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.ListAutomationRunsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_automation_runs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_automation_runs_flattened_error_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_automation_runs( + cloud_deploy.ListAutomationRunsRequest(), + parent="parent_value", + ) + + +def test_list_automation_runs_pager(transport_name: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + ], + next_page_token="abc", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[], + next_page_token="def", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_automation_runs(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.AutomationRun) for i in results) + + +def test_list_automation_runs_pages(transport_name: str = 
"grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + ], + next_page_token="abc", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[], + next_page_token="def", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + ], + ), + RuntimeError, + ) + pages = list(client.list_automation_runs(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_automation_runs_async_pager(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + ], + next_page_token="abc", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[], + next_page_token="def", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_automation_runs( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, cloud_deploy.AutomationRun) for i in responses) + + +@pytest.mark.asyncio +async def test_list_automation_runs_async_pages(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_automation_runs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + ], + next_page_token="abc", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[], + next_page_token="def", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListAutomationRunsResponse( + automation_runs=[ + cloud_deploy.AutomationRun(), + cloud_deploy.AutomationRun(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_automation_runs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.CancelAutomationRunRequest, + dict, + ], +) +def test_cancel_automation_run(request_type, transport: str = "grpc"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.CancelAutomationRunResponse() + response = client.cancel_automation_run(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloud_deploy.CancelAutomationRunRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_deploy.CancelAutomationRunResponse) + + +def test_cancel_automation_run_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.cancel_automation_run() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.CancelAutomationRunRequest() + + +def test_cancel_automation_run_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_deploy.CancelAutomationRunRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.cancel_automation_run(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.CancelAutomationRunRequest( + name="name_value", + ) + + +def test_cancel_automation_run_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.cancel_automation_run + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.cancel_automation_run + ] = mock_rpc + request = {} + client.cancel_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.cancel_automation_run(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_cancel_automation_run_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.CancelAutomationRunResponse() + ) + response = await client.cancel_automation_run() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_deploy.CancelAutomationRunRequest() + + +@pytest.mark.asyncio +async def test_cancel_automation_run_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.cancel_automation_run + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.cancel_automation_run + ] = mock_object + + request = {} + await client.cancel_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.cancel_automation_run(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_cancel_automation_run_async( + transport: str = "grpc_asyncio", + request_type=cloud_deploy.CancelAutomationRunRequest, +): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.CancelAutomationRunResponse() + ) + response = await client.cancel_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cloud_deploy.CancelAutomationRunRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_deploy.CancelAutomationRunResponse) + + +@pytest.mark.asyncio +async def test_cancel_automation_run_async_from_dict(): + await test_cancel_automation_run_async(request_type=dict) + + +def test_cancel_automation_run_field_headers(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.CancelAutomationRunRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + call.return_value = cloud_deploy.CancelAutomationRunResponse() + client.cancel_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_automation_run_field_headers_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_deploy.CancelAutomationRunRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.CancelAutomationRunResponse() + ) + await client.cancel_automation_run(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_cancel_automation_run_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.CancelAutomationRunResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.cancel_automation_run( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_cancel_automation_run_flattened_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.cancel_automation_run( + cloud_deploy.CancelAutomationRunRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_cancel_automation_run_flattened_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_automation_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_deploy.CancelAutomationRunResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_deploy.CancelAutomationRunResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.cancel_automation_run( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_cancel_automation_run_flattened_error_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.cancel_automation_run( + cloud_deploy.CancelAutomationRunRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.ListDeliveryPipelinesRequest, + dict, + ], +) +def test_list_delivery_pipelines_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.ListDeliveryPipelinesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.ListDeliveryPipelinesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_delivery_pipelines(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDeliveryPipelinesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_delivery_pipelines_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_delivery_pipelines + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_delivery_pipelines + ] = mock_rpc + + request = {} + client.list_delivery_pipelines(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_delivery_pipelines(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_delivery_pipelines_rest_required_fields( + request_type=cloud_deploy.ListDeliveryPipelinesRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_delivery_pipelines._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_delivery_pipelines._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.ListDeliveryPipelinesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_deploy.ListDeliveryPipelinesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_delivery_pipelines(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_delivery_pipelines_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_delivery_pipelines._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_delivery_pipelines_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + 
) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_list_delivery_pipelines" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_list_delivery_pipelines" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.ListDeliveryPipelinesRequest.pb( + cloud_deploy.ListDeliveryPipelinesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_deploy.ListDeliveryPipelinesResponse.to_json( + cloud_deploy.ListDeliveryPipelinesResponse() + ) + + request = cloud_deploy.ListDeliveryPipelinesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_deploy.ListDeliveryPipelinesResponse() + + client.list_delivery_pipelines( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_delivery_pipelines_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.ListDeliveryPipelinesRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_delivery_pipelines(request) + + +def test_list_delivery_pipelines_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.ListDeliveryPipelinesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.ListDeliveryPipelinesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_delivery_pipelines(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/deliveryPipelines" + % client.transport._host, + args[1], + ) + + +def test_list_delivery_pipelines_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_delivery_pipelines( + cloud_deploy.ListDeliveryPipelinesRequest(), + parent="parent_value", + ) + + +def test_list_delivery_pipelines_rest_pager(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloud_deploy.ListDeliveryPipelinesResponse( + delivery_pipelines=[ + cloud_deploy.DeliveryPipeline(), + cloud_deploy.DeliveryPipeline(), + cloud_deploy.DeliveryPipeline(), + ], + next_page_token="abc", + ), + cloud_deploy.ListDeliveryPipelinesResponse( + delivery_pipelines=[], + next_page_token="def", + ), + cloud_deploy.ListDeliveryPipelinesResponse( + delivery_pipelines=[ + cloud_deploy.DeliveryPipeline(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListDeliveryPipelinesResponse( + delivery_pipelines=[ + cloud_deploy.DeliveryPipeline(), + cloud_deploy.DeliveryPipeline(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + cloud_deploy.ListDeliveryPipelinesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_delivery_pipelines(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.DeliveryPipeline) for i in results) + + pages = list(client.list_delivery_pipelines(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.GetDeliveryPipelineRequest, + dict, + ], +) +def test_get_delivery_pipeline_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": 
"projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.DeliveryPipeline( + name="name_value", + uid="uid_value", + description="description_value", + etag="etag_value", + suspended=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.DeliveryPipeline.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_delivery_pipeline(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_deploy.DeliveryPipeline) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.etag == "etag_value" + assert response.suspended is True + + +def test_get_delivery_pipeline_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_delivery_pipeline + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # 
operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_delivery_pipeline + ] = mock_rpc + + request = {} + client.get_delivery_pipeline(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_delivery_pipeline(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_delivery_pipeline_rest_required_fields( + request_type=cloud_deploy.GetDeliveryPipelineRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_delivery_pipeline._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_delivery_pipeline._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.DeliveryPipeline() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_deploy.DeliveryPipeline.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_delivery_pipeline(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_delivery_pipeline_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_delivery_pipeline._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_delivery_pipeline_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( 
+ transports.CloudDeployRestInterceptor, "post_get_delivery_pipeline" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_get_delivery_pipeline" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.GetDeliveryPipelineRequest.pb( + cloud_deploy.GetDeliveryPipelineRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_deploy.DeliveryPipeline.to_json( + cloud_deploy.DeliveryPipeline() + ) + + request = cloud_deploy.GetDeliveryPipelineRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_deploy.DeliveryPipeline() + + client.get_delivery_pipeline( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_delivery_pipeline_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.GetDeliveryPipelineRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_delivery_pipeline(request) + + +def test_get_delivery_pipeline_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.DeliveryPipeline() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.DeliveryPipeline.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_delivery_pipeline(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*}" + % client.transport._host, + args[1], + ) + + +def test_get_delivery_pipeline_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_delivery_pipeline( + cloud_deploy.GetDeliveryPipelineRequest(), + name="name_value", + ) + + +def test_get_delivery_pipeline_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.CreateDeliveryPipelineRequest, + dict, + ], +) +def test_create_delivery_pipeline_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["delivery_pipeline"] = { + "name": "name_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "serial_pipeline": { + "stages": [ + { + "target_id": "target_id_value", + "profiles": ["profiles_value1", "profiles_value2"], + "strategy": { + "standard": { + "verify": True, + "predeploy": { + "actions": ["actions_value1", "actions_value2"] + }, + "postdeploy": { + "actions": ["actions_value1", "actions_value2"] + }, + }, + "canary": { + "runtime_config": { + "kubernetes": { + "gateway_service_mesh": { + "http_route": "http_route_value", + "service": "service_value", + "deployment": "deployment_value", + "route_update_wait_time": { + "seconds": 751, + "nanos": 543, + }, + 
"stable_cutback_duration": {}, + }, + "service_networking": { + "service": "service_value", + "deployment": "deployment_value", + "disable_pod_overprovisioning": True, + }, + }, + "cloud_run": { + "automatic_traffic_control": True, + "canary_revision_tags": [ + "canary_revision_tags_value1", + "canary_revision_tags_value2", + ], + "prior_revision_tags": [ + "prior_revision_tags_value1", + "prior_revision_tags_value2", + ], + "stable_revision_tags": [ + "stable_revision_tags_value1", + "stable_revision_tags_value2", + ], + }, + }, + "canary_deployment": { + "percentages": [1170, 1171], + "verify": True, + "predeploy": {}, + "postdeploy": {}, + }, + "custom_canary_deployment": { + "phase_configs": [ + { + "phase_id": "phase_id_value", + "percentage": 1054, + "profiles": [ + "profiles_value1", + "profiles_value2", + ], + "verify": True, + "predeploy": {}, + "postdeploy": {}, + } + ] + }, + }, + }, + "deploy_parameters": [{"values": {}, "match_target_labels": {}}], + } + ] + }, + "condition": { + "pipeline_ready_condition": {"status": True, "update_time": {}}, + "targets_present_condition": { + "status": True, + "missing_targets": ["missing_targets_value1", "missing_targets_value2"], + "update_time": {}, + }, + "targets_type_condition": { + "status": True, + "error_details": "error_details_value", + }, + }, + "etag": "etag_value", + "suspended": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_deploy.CreateDeliveryPipelineRequest.meta.fields[ + "delivery_pipeline" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. 
+ # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["delivery_pipeline"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, 
len(request_init["delivery_pipeline"][field])): + del request_init["delivery_pipeline"][field][i][subfield] + else: + del request_init["delivery_pipeline"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_delivery_pipeline(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_delivery_pipeline_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_delivery_pipeline + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_delivery_pipeline + ] = mock_rpc + + request = {} + client.create_delivery_pipeline(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_delivery_pipeline(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_delivery_pipeline_rest_required_fields( + request_type=cloud_deploy.CreateDeliveryPipelineRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["delivery_pipeline_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "deliveryPipelineId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_delivery_pipeline._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "deliveryPipelineId" in jsonified_request + assert ( + jsonified_request["deliveryPipelineId"] == request_init["delivery_pipeline_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["deliveryPipelineId"] = "delivery_pipeline_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_delivery_pipeline._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "delivery_pipeline_id", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "deliveryPipelineId" in jsonified_request + assert jsonified_request["deliveryPipelineId"] == "delivery_pipeline_id_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_delivery_pipeline(request) + + expected_params = [ + ( + "deliveryPipelineId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_delivery_pipeline_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_delivery_pipeline._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "deliveryPipelineId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "deliveryPipelineId", + "deliveryPipeline", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_delivery_pipeline_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_create_delivery_pipeline" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_create_delivery_pipeline" + ) as pre: + 
pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.CreateDeliveryPipelineRequest.pb( + cloud_deploy.CreateDeliveryPipelineRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloud_deploy.CreateDeliveryPipelineRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_delivery_pipeline( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_delivery_pipeline_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.CreateDeliveryPipelineRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_delivery_pipeline(request) + + +def test_create_delivery_pipeline_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), + delivery_pipeline_id="delivery_pipeline_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_delivery_pipeline(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/deliveryPipelines" + % client.transport._host, + args[1], + ) + + +def test_create_delivery_pipeline_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_delivery_pipeline( + cloud_deploy.CreateDeliveryPipelineRequest(), + parent="parent_value", + delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), + delivery_pipeline_id="delivery_pipeline_id_value", + ) + + +def test_create_delivery_pipeline_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.UpdateDeliveryPipelineRequest, + dict, + ], +) +def test_update_delivery_pipeline_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "delivery_pipeline": { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + } + request_init["delivery_pipeline"] = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "serial_pipeline": { + "stages": [ + { + "target_id": "target_id_value", + "profiles": ["profiles_value1", "profiles_value2"], + "strategy": { + "standard": { + "verify": True, + "predeploy": { + "actions": ["actions_value1", "actions_value2"] + }, + "postdeploy": { + "actions": ["actions_value1", "actions_value2"] + }, + }, + "canary": { + "runtime_config": { + "kubernetes": { + "gateway_service_mesh": { + "http_route": "http_route_value", + "service": "service_value", + "deployment": "deployment_value", + "route_update_wait_time": { + "seconds": 751, + "nanos": 543, + }, + "stable_cutback_duration": {}, + }, + "service_networking": { + "service": "service_value", + "deployment": "deployment_value", + "disable_pod_overprovisioning": True, + }, + }, + "cloud_run": { + "automatic_traffic_control": True, + 
"canary_revision_tags": [ + "canary_revision_tags_value1", + "canary_revision_tags_value2", + ], + "prior_revision_tags": [ + "prior_revision_tags_value1", + "prior_revision_tags_value2", + ], + "stable_revision_tags": [ + "stable_revision_tags_value1", + "stable_revision_tags_value2", + ], + }, + }, + "canary_deployment": { + "percentages": [1170, 1171], + "verify": True, + "predeploy": {}, + "postdeploy": {}, + }, + "custom_canary_deployment": { + "phase_configs": [ + { + "phase_id": "phase_id_value", + "percentage": 1054, + "profiles": [ + "profiles_value1", + "profiles_value2", + ], + "verify": True, + "predeploy": {}, + "postdeploy": {}, + } + ] + }, + }, + }, + "deploy_parameters": [{"values": {}, "match_target_labels": {}}], + } + ] + }, + "condition": { + "pipeline_ready_condition": {"status": True, "update_time": {}}, + "targets_present_condition": { + "status": True, + "missing_targets": ["missing_targets_value1", "missing_targets_value2"], + "update_time": {}, + }, + "targets_type_condition": { + "status": True, + "error_details": "error_details_value", + }, + }, + "etag": "etag_value", + "suspended": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_deploy.UpdateDeliveryPipelineRequest.meta.fields[ + "delivery_pipeline" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["delivery_pipeline"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["delivery_pipeline"][field])): + del request_init["delivery_pipeline"][field][i][subfield] + 
else: + del request_init["delivery_pipeline"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_delivery_pipeline(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_delivery_pipeline_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_delivery_pipeline + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_delivery_pipeline + ] = mock_rpc + + request = {} + client.update_delivery_pipeline(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_delivery_pipeline(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_delivery_pipeline_rest_required_fields( + request_type=cloud_deploy.UpdateDeliveryPipelineRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_delivery_pipeline._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_delivery_pipeline._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "request_id", + "update_mask", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_delivery_pipeline(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_delivery_pipelines_rest_unset_required_fields(): +def test_update_delivery_pipeline_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_delivery_pipelines._get_unset_required_fields({}) + unset_fields = transport.update_delivery_pipeline._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "allowMissing", + "requestId", + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "updateMask", + "deliveryPipeline", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_delivery_pipelines_rest_interceptors(null_interceptor): +def test_update_delivery_pipeline_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18337,14 +21927,16 @@ def test_list_delivery_pipelines_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_list_delivery_pipelines" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_update_delivery_pipeline" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_list_delivery_pipelines" + transports.CloudDeployRestInterceptor, "pre_update_delivery_pipeline" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.ListDeliveryPipelinesRequest.pb( - cloud_deploy.ListDeliveryPipelinesRequest() + pb_message = cloud_deploy.UpdateDeliveryPipelineRequest.pb( + cloud_deploy.UpdateDeliveryPipelineRequest() ) transcode.return_value = { "method": "post", @@ -18356,19 +21948,19 @@ def test_list_delivery_pipelines_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_deploy.ListDeliveryPipelinesResponse.to_json( - cloud_deploy.ListDeliveryPipelinesResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = cloud_deploy.ListDeliveryPipelinesRequest() + request = cloud_deploy.UpdateDeliveryPipelineRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.ListDeliveryPipelinesResponse() + post.return_value = operations_pb2.Operation() - client.list_delivery_pipelines( + client.update_delivery_pipeline( request, metadata=[ ("key", "val"), @@ -18380,8 +21972,8 @@ def test_list_delivery_pipelines_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_delivery_pipelines_rest_bad_request( - 
transport: str = "rest", request_type=cloud_deploy.ListDeliveryPipelinesRequest +def test_update_delivery_pipeline_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.UpdateDeliveryPipelineRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18389,7 +21981,11 @@ def test_list_delivery_pipelines_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "delivery_pipeline": { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -18401,10 +21997,10 @@ def test_list_delivery_pipelines_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_delivery_pipelines(request) + client.update_delivery_pipeline(request) -def test_list_delivery_pipelines_rest_flattened(): +def test_update_delivery_pipeline_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18413,40 +22009,43 @@ def test_list_delivery_pipelines_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = cloud_deploy.ListDeliveryPipelinesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "delivery_pipeline": { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.ListDeliveryPipelinesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_delivery_pipelines(**mock_args) + client.update_delivery_pipeline(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/deliveryPipelines" + "%s/v1/{delivery_pipeline.name=projects/*/locations/*/deliveryPipelines/*}" % client.transport._host, args[1], ) -def test_list_delivery_pipelines_rest_flattened_error(transport: str = "rest"): +def test_update_delivery_pipeline_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18455,83 +22054,27 @@ def test_list_delivery_pipelines_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_delivery_pipelines( - cloud_deploy.ListDeliveryPipelinesRequest(), - parent="parent_value", + client.update_delivery_pipeline( + cloud_deploy.UpdateDeliveryPipelineRequest(), + delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_list_delivery_pipelines_rest_pager(transport: str = "rest"): +def test_update_delivery_pipeline_rest_error(): client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - cloud_deploy.ListDeliveryPipelinesResponse( - delivery_pipelines=[ - cloud_deploy.DeliveryPipeline(), - cloud_deploy.DeliveryPipeline(), - cloud_deploy.DeliveryPipeline(), - ], - next_page_token="abc", - ), - cloud_deploy.ListDeliveryPipelinesResponse( - delivery_pipelines=[], - next_page_token="def", - ), - cloud_deploy.ListDeliveryPipelinesResponse( - delivery_pipelines=[ - cloud_deploy.DeliveryPipeline(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListDeliveryPipelinesResponse( - delivery_pipelines=[ - cloud_deploy.DeliveryPipeline(), - cloud_deploy.DeliveryPipeline(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - cloud_deploy.ListDeliveryPipelinesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - 
req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_delivery_pipelines(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloud_deploy.DeliveryPipeline) for i in results) - - pages = list(client.list_delivery_pipelines(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetDeliveryPipelineRequest, + cloud_deploy.DeleteDeliveryPipelineRequest, dict, ], ) -def test_get_delivery_pipeline_rest(request_type): +def test_delete_delivery_pipeline_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18546,35 +22089,22 @@ def test_get_delivery_pipeline_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.DeliveryPipeline( - name="name_value", - uid="uid_value", - description="description_value", - etag="etag_value", - suspended=True, - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.DeliveryPipeline.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_delivery_pipeline(request) + response = client.delete_delivery_pipeline(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.DeliveryPipeline) - assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.description == "description_value" - assert response.etag == "etag_value" - assert response.suspended is True + assert response.operation.name == "operations/spam" -def test_get_delivery_pipeline_rest_use_cached_wrapped_rpc(): +def test_delete_delivery_pipeline_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -18589,7 +22119,7 @@ def test_get_delivery_pipeline_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_delivery_pipeline + client._transport.delete_delivery_pipeline in client._transport._wrapped_methods ) @@ -18599,24 +22129,28 @@ def test_get_delivery_pipeline_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_delivery_pipeline + client._transport.delete_delivery_pipeline ] = mock_rpc request = {} - client.get_delivery_pipeline(request) + client.delete_delivery_pipeline(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_delivery_pipeline(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_delivery_pipeline(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_delivery_pipeline_rest_required_fields( - request_type=cloud_deploy.GetDeliveryPipelineRequest, +def test_delete_delivery_pipeline_rest_required_fields( + request_type=cloud_deploy.DeleteDeliveryPipelineRequest, ): transport_class = transports.CloudDeployRestTransport @@ -18632,7 +22166,7 @@ def test_get_delivery_pipeline_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_delivery_pipeline._get_unset_required_fields(jsonified_request) + ).delete_delivery_pipeline._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -18641,7 +22175,17 @@ def test_get_delivery_pipeline_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_delivery_pipeline._get_unset_required_fields(jsonified_request) + ).delete_delivery_pipeline._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "etag", + "force", + "request_id", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -18655,7 +22199,7 @@ def test_get_delivery_pipeline_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = cloud_deploy.DeliveryPipeline() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -18667,39 +22211,47 @@ def test_get_delivery_pipeline_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = cloud_deploy.DeliveryPipeline.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_delivery_pipeline(request) + response = client.delete_delivery_pipeline(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_delivery_pipeline_rest_unset_required_fields(): +def test_delete_delivery_pipeline_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_delivery_pipeline._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.delete_delivery_pipeline._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "etag", + "force", + "requestId", + "validateOnly", + ) + ) + & set(("name",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_delivery_pipeline_rest_interceptors(null_interceptor): +def test_delete_delivery_pipeline_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18712,14 +22264,16 @@ def test_get_delivery_pipeline_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_get_delivery_pipeline" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_delete_delivery_pipeline" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_get_delivery_pipeline" + transports.CloudDeployRestInterceptor, "pre_delete_delivery_pipeline" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.GetDeliveryPipelineRequest.pb( - cloud_deploy.GetDeliveryPipelineRequest() + pb_message = cloud_deploy.DeleteDeliveryPipelineRequest.pb( + cloud_deploy.DeleteDeliveryPipelineRequest() ) transcode.return_value = { "method": "post", @@ -18731,19 +22285,19 @@ def test_get_delivery_pipeline_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_deploy.DeliveryPipeline.to_json( - cloud_deploy.DeliveryPipeline() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = cloud_deploy.GetDeliveryPipelineRequest() + request = cloud_deploy.DeleteDeliveryPipelineRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.DeliveryPipeline() + post.return_value = operations_pb2.Operation() - client.get_delivery_pipeline( + client.delete_delivery_pipeline( request, metadata=[ ("key", "val"), @@ -18755,8 +22309,8 @@ def test_get_delivery_pipeline_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_delivery_pipeline_rest_bad_request( - transport: str = "rest", 
request_type=cloud_deploy.GetDeliveryPipelineRequest +def test_delete_delivery_pipeline_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.DeleteDeliveryPipelineRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18778,10 +22332,10 @@ def test_get_delivery_pipeline_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_delivery_pipeline(request) + client.delete_delivery_pipeline(request) -def test_get_delivery_pipeline_rest_flattened(): +def test_delete_delivery_pipeline_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18790,7 +22344,7 @@ def test_get_delivery_pipeline_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.DeliveryPipeline() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { @@ -18806,13 +22360,11 @@ def test_get_delivery_pipeline_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.DeliveryPipeline.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_delivery_pipeline(**mock_args) + client.delete_delivery_pipeline(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
@@ -18825,7 +22377,7 @@ def test_get_delivery_pipeline_rest_flattened(): ) -def test_get_delivery_pipeline_rest_flattened_error(transport: str = "rest"): +def test_delete_delivery_pipeline_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18834,13 +22386,13 @@ def test_get_delivery_pipeline_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_delivery_pipeline( - cloud_deploy.GetDeliveryPipelineRequest(), + client.delete_delivery_pipeline( + cloud_deploy.DeleteDeliveryPipelineRequest(), name="name_value", ) -def test_get_delivery_pipeline_rest_error(): +def test_delete_delivery_pipeline_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -18849,11 +22401,11 @@ def test_get_delivery_pipeline_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.CreateDeliveryPipelineRequest, + cloud_deploy.ListTargetsRequest, dict, ], ) -def test_create_delivery_pipeline_rest(request_type): +def test_list_targets_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18861,196 +22413,34 @@ def test_create_delivery_pipeline_rest(request_type): # send a request that will satisfy transcoding request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["delivery_pipeline"] = { - "name": "name_value", - "uid": "uid_value", - "description": "description_value", - "annotations": {}, - "labels": {}, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "serial_pipeline": { - "stages": [ - { - "target_id": "target_id_value", - "profiles": ["profiles_value1", "profiles_value2"], - "strategy": { - "standard": { - "verify": True, - "predeploy": { - "actions": ["actions_value1", 
"actions_value2"] - }, - "postdeploy": { - "actions": ["actions_value1", "actions_value2"] - }, - }, - "canary": { - "runtime_config": { - "kubernetes": { - "gateway_service_mesh": { - "http_route": "http_route_value", - "service": "service_value", - "deployment": "deployment_value", - "route_update_wait_time": { - "seconds": 751, - "nanos": 543, - }, - "stable_cutback_duration": {}, - }, - "service_networking": { - "service": "service_value", - "deployment": "deployment_value", - "disable_pod_overprovisioning": True, - }, - }, - "cloud_run": { - "automatic_traffic_control": True, - "canary_revision_tags": [ - "canary_revision_tags_value1", - "canary_revision_tags_value2", - ], - "prior_revision_tags": [ - "prior_revision_tags_value1", - "prior_revision_tags_value2", - ], - "stable_revision_tags": [ - "stable_revision_tags_value1", - "stable_revision_tags_value2", - ], - }, - }, - "canary_deployment": { - "percentages": [1170, 1171], - "verify": True, - "predeploy": {}, - "postdeploy": {}, - }, - "custom_canary_deployment": { - "phase_configs": [ - { - "phase_id": "phase_id_value", - "percentage": 1054, - "profiles": [ - "profiles_value1", - "profiles_value2", - ], - "verify": True, - "predeploy": {}, - "postdeploy": {}, - } - ] - }, - }, - }, - "deploy_parameters": [{"values": {}, "match_target_labels": {}}], - } - ] - }, - "condition": { - "pipeline_ready_condition": {"status": True, "update_time": {}}, - "targets_present_condition": { - "status": True, - "missing_targets": ["missing_targets_value1", "missing_targets_value2"], - "update_time": {}, - }, - "targets_type_condition": { - "status": True, - "error_details": "error_details_value", - }, - }, - "etag": "etag_value", - "suspended": True, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = cloud_deploy.CreateDeliveryPipelineRequest.meta.fields[ - "delivery_pipeline" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["delivery_pipeline"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the 
runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["delivery_pipeline"][field])): - del request_init["delivery_pipeline"][field][i][subfield] - else: - del request_init["delivery_pipeline"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.ListTargetsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.ListTargetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_delivery_pipeline(request) + response = client.list_targets(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListTargetsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_create_delivery_pipeline_rest_use_cached_wrapped_rpc(): +def test_list_targets_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19064,45 +22454,35 @@ def test_create_delivery_pipeline_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.create_delivery_pipeline - in client._transport._wrapped_methods - ) + assert client._transport.list_targets in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_delivery_pipeline - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_targets] = mock_rpc request = {} - client.create_delivery_pipeline(request) + client.list_targets(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_delivery_pipeline(request) + client.list_targets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_delivery_pipeline_rest_required_fields( - request_type=cloud_deploy.CreateDeliveryPipelineRequest, +def test_list_targets_rest_required_fields( + request_type=cloud_deploy.ListTargetsRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} request_init["parent"] = "" - request_init["delivery_pipeline_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -19110,31 +22490,26 @@ def test_create_delivery_pipeline_rest_required_fields( ) # verify fields with default values are dropped - assert "deliveryPipelineId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_delivery_pipeline._get_unset_required_fields(jsonified_request) + ).list_targets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "deliveryPipelineId" in jsonified_request - assert ( - jsonified_request["deliveryPipelineId"] == request_init["delivery_pipeline_id"] - ) jsonified_request["parent"] = "parent_value" - jsonified_request["deliveryPipelineId"] = "delivery_pipeline_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_delivery_pipeline._get_unset_required_fields(jsonified_request) + ).list_targets._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "delivery_pipeline_id", - "request_id", - "validate_only", + "filter", + "order_by", + "page_size", + "page_token", ) ) jsonified_request.update(unset_fields) @@ -19142,8 +22517,6 @@ def test_create_delivery_pipeline_rest_required_fields( # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "deliveryPipelineId" in jsonified_request - assert jsonified_request["deliveryPipelineId"] == "delivery_pipeline_id_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19152,7 +22525,7 @@ def test_create_delivery_pipeline_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.ListTargetsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -19164,58 +22537,49 @@ def test_create_delivery_pipeline_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_deploy.ListTargetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_delivery_pipeline(request) + response = client.list_targets(request) - expected_params = [ - ( - "deliveryPipelineId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_delivery_pipeline_rest_unset_required_fields(): +def test_list_targets_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_delivery_pipeline._get_unset_required_fields({}) + unset_fields = transport.list_targets._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "deliveryPipelineId", - "requestId", - "validateOnly", - ) - ) - & set( - ( - "parent", - "deliveryPipelineId", - "deliveryPipeline", + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_delivery_pipeline_rest_interceptors(null_interceptor): +def test_list_targets_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19228,16 +22592,14 @@ def test_create_delivery_pipeline_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_create_delivery_pipeline" + transports.CloudDeployRestInterceptor, "post_list_targets" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_create_delivery_pipeline" + transports.CloudDeployRestInterceptor, "pre_list_targets" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.CreateDeliveryPipelineRequest.pb( - cloud_deploy.CreateDeliveryPipelineRequest() + pb_message = cloud_deploy.ListTargetsRequest.pb( + cloud_deploy.ListTargetsRequest() ) transcode.return_value = { "method": "post", @@ -19249,19 +22611,19 @@ def test_create_delivery_pipeline_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = cloud_deploy.ListTargetsResponse.to_json( + cloud_deploy.ListTargetsResponse() ) - request = cloud_deploy.CreateDeliveryPipelineRequest() + request = cloud_deploy.ListTargetsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = cloud_deploy.ListTargetsResponse() - client.create_delivery_pipeline( + client.list_targets( request, metadata=[ ("key", "val"), @@ -19273,8 +22635,8 @@ def test_create_delivery_pipeline_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_delivery_pipeline_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.CreateDeliveryPipelineRequest +def 
test_list_targets_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.ListTargetsRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19294,10 +22656,10 @@ def test_create_delivery_pipeline_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_delivery_pipeline(request) + client.list_targets(request) -def test_create_delivery_pipeline_rest_flattened(): +def test_list_targets_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19306,7 +22668,7 @@ def test_create_delivery_pipeline_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.ListTargetsResponse() # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -19314,32 +22676,31 @@ def test_create_delivery_pipeline_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), - delivery_pipeline_id="delivery_pipeline_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.ListTargetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_delivery_pipeline(**mock_args) + client.list_targets(**mock_args) # Establish that the underlying call was made with the expected # 
request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/deliveryPipelines" - % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/targets" % client.transport._host, args[1], ) -def test_create_delivery_pipeline_rest_flattened_error(transport: str = "rest"): +def test_list_targets_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19348,28 +22709,81 @@ def test_create_delivery_pipeline_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_delivery_pipeline( - cloud_deploy.CreateDeliveryPipelineRequest(), + client.list_targets( + cloud_deploy.ListTargetsRequest(), parent="parent_value", - delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), - delivery_pipeline_id="delivery_pipeline_id_value", ) -def test_create_delivery_pipeline_rest_error(): +def test_list_targets_rest_pager(transport: str = "rest"): client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloud_deploy.ListTargetsResponse( + targets=[ + cloud_deploy.Target(), + cloud_deploy.Target(), + cloud_deploy.Target(), + ], + next_page_token="abc", + ), + cloud_deploy.ListTargetsResponse( + targets=[], + next_page_token="def", + ), + cloud_deploy.ListTargetsResponse( + targets=[ + cloud_deploy.Target(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListTargetsResponse( + targets=[ + cloud_deploy.Target(), + cloud_deploy.Target(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(cloud_deploy.ListTargetsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_targets(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.Target) for i in results) + + pages = list(client.list_targets(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - cloud_deploy.UpdateDeliveryPipelineRequest, + cloud_deploy.RollbackTargetRequest, dict, ], ) -def test_update_delivery_pipeline_rest(request_type): +def test_rollback_target_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19377,200 +22791,370 @@ def test_update_delivery_pipeline_rest(request_type): # send a request that will satisfy transcoding request_init = { - "delivery_pipeline": { - "name": 
"projects/sample1/locations/sample2/deliveryPipelines/sample3" - } - } - request_init["delivery_pipeline"] = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3", - "uid": "uid_value", - "description": "description_value", - "annotations": {}, - "labels": {}, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "serial_pipeline": { - "stages": [ - { - "target_id": "target_id_value", - "profiles": ["profiles_value1", "profiles_value2"], - "strategy": { - "standard": { - "verify": True, - "predeploy": { - "actions": ["actions_value1", "actions_value2"] - }, - "postdeploy": { - "actions": ["actions_value1", "actions_value2"] - }, - }, - "canary": { - "runtime_config": { - "kubernetes": { - "gateway_service_mesh": { - "http_route": "http_route_value", - "service": "service_value", - "deployment": "deployment_value", - "route_update_wait_time": { - "seconds": 751, - "nanos": 543, - }, - "stable_cutback_duration": {}, - }, - "service_networking": { - "service": "service_value", - "deployment": "deployment_value", - "disable_pod_overprovisioning": True, - }, - }, - "cloud_run": { - "automatic_traffic_control": True, - "canary_revision_tags": [ - "canary_revision_tags_value1", - "canary_revision_tags_value2", - ], - "prior_revision_tags": [ - "prior_revision_tags_value1", - "prior_revision_tags_value2", - ], - "stable_revision_tags": [ - "stable_revision_tags_value1", - "stable_revision_tags_value2", - ], - }, - }, - "canary_deployment": { - "percentages": [1170, 1171], - "verify": True, - "predeploy": {}, - "postdeploy": {}, - }, - "custom_canary_deployment": { - "phase_configs": [ - { - "phase_id": "phase_id_value", - "percentage": 1054, - "profiles": [ - "profiles_value1", - "profiles_value2", - ], - "verify": True, - "predeploy": {}, - "postdeploy": {}, - } - ] - }, - }, - }, - "deploy_parameters": [{"values": {}, "match_target_labels": {}}], - } - ] - }, - "condition": { - "pipeline_ready_condition": {"status": True, 
"update_time": {}}, - "targets_present_condition": { - "status": True, - "missing_targets": ["missing_targets_value1", "missing_targets_value2"], - "update_time": {}, - }, - "targets_type_condition": { - "status": True, - "error_details": "error_details_value", - }, - }, - "etag": "etag_value", - "suspended": True, + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.RollbackTargetResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.RollbackTargetResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.rollback_target(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloud_deploy.RollbackTargetResponse) + + +def test_rollback_target_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.rollback_target in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.rollback_target] = mock_rpc + + request = {} + client.rollback_target(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.rollback_target(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_rollback_target_rest_required_fields( + request_type=cloud_deploy.RollbackTargetRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["name"] = "" + request_init["target_id"] = "" + request_init["rollout_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).rollback_target._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + jsonified_request["targetId"] = "target_id_value" + jsonified_request["rolloutId"] = "rollout_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).rollback_target._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "targetId" in jsonified_request + assert jsonified_request["targetId"] == "target_id_value" + assert "rolloutId" in jsonified_request + assert jsonified_request["rolloutId"] == "rollout_id_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = cloud_deploy.RollbackTargetResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_deploy.RollbackTargetResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.rollback_target(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_rollback_target_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.rollback_target._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "targetId", + "rolloutId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_rollback_target_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = 
CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_rollback_target" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_rollback_target" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.RollbackTargetRequest.pb( + cloud_deploy.RollbackTargetRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_deploy.RollbackTargetResponse.to_json( + cloud_deploy.RollbackTargetResponse() + ) + + request = cloud_deploy.RollbackTargetRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_deploy.RollbackTargetResponse() + + client.rollback_target( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_rollback_target_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.RollbackTargetRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + request = request_type(**request_init) - # Determine if the message type is proto-plus or protobuf - test_field = cloud_deploy.UpdateDeliveryPipelineRequest.meta.fields[ - "delivery_pipeline" - ] + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.rollback_target(request) - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") +def test_rollback_target_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloud_deploy.RollbackTargetResponse() - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } - subfields_not_in_runtime = [] + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + target_id="target_id_value", + rollout_id="rollout_id_value", + ) + mock_args.update(sample_request) - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["delivery_pipeline"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.RollbackTargetResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) + client.rollback_target(**mock_args) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at 
runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["delivery_pipeline"][field])): - del request_init["delivery_pipeline"][field][i][subfield] - else: - del request_init["delivery_pipeline"][field][subfield] + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*}:rollbackTarget" + % client.transport._host, + args[1], + ) + + +def test_rollback_target_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.rollback_target( + cloud_deploy.RollbackTargetRequest(), + name="name_value", + target_id="target_id_value", + rollout_id="rollout_id_value", + ) + + +def test_rollback_target_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.GetTargetRequest, + dict, + ], +) +def test_get_target_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.Target( + name="name_value", + target_id="target_id_value", + uid="uid_value", + description="description_value", + require_approval=True, + etag="etag_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.Target.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_delivery_pipeline(request) + response = client.get_target(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, cloud_deploy.Target) + assert response.name == "name_value" + assert response.target_id == "target_id_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.require_approval is True + assert response.etag == "etag_value" -def test_update_delivery_pipeline_rest_use_cached_wrapped_rpc(): +def test_get_target_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19584,43 +23168,33 @@ def test_update_delivery_pipeline_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_delivery_pipeline - in client._transport._wrapped_methods - ) + assert client._transport.get_target in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name 
= ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_delivery_pipeline - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_target] = mock_rpc request = {} - client.update_delivery_pipeline(request) + client.get_target(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_delivery_pipeline(request) + client.get_target(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_delivery_pipeline_rest_required_fields( - request_type=cloud_deploy.UpdateDeliveryPipelineRequest, -): +def test_get_target_rest_required_fields(request_type=cloud_deploy.GetTargetRequest): transport_class = transports.CloudDeployRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -19631,26 +23205,21 @@ def test_update_delivery_pipeline_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_delivery_pipeline._get_unset_required_fields(jsonified_request) + ).get_target._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_delivery_pipeline._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "allow_missing", - "request_id", - "update_mask", - "validate_only", - ) - ) + ).get_target._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19659,7 +23228,7 @@ def test_update_delivery_pipeline_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.Target() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -19671,52 +23240,39 @@ def test_update_delivery_pipeline_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_deploy.Target.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_delivery_pipeline(request) + response = client.get_target(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_delivery_pipeline_rest_unset_required_fields(): +def test_get_target_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials 
) - unset_fields = transport.update_delivery_pipeline._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "allowMissing", - "requestId", - "updateMask", - "validateOnly", - ) - ) - & set( - ( - "updateMask", - "deliveryPipeline", - ) - ) - ) + unset_fields = transport.get_target._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_delivery_pipeline_rest_interceptors(null_interceptor): +def test_get_target_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19729,17 +23285,13 @@ def test_update_delivery_pipeline_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_update_delivery_pipeline" + transports.CloudDeployRestInterceptor, "post_get_target" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_update_delivery_pipeline" + transports.CloudDeployRestInterceptor, "pre_get_target" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.UpdateDeliveryPipelineRequest.pb( - cloud_deploy.UpdateDeliveryPipelineRequest() - ) + pb_message = cloud_deploy.GetTargetRequest.pb(cloud_deploy.GetTargetRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -19750,19 +23302,17 @@ def test_update_delivery_pipeline_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + req.return_value._content = cloud_deploy.Target.to_json(cloud_deploy.Target()) - request = 
cloud_deploy.UpdateDeliveryPipelineRequest() + request = cloud_deploy.GetTargetRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = cloud_deploy.Target() - client.update_delivery_pipeline( + client.get_target( request, metadata=[ ("key", "val"), @@ -19774,8 +23324,8 @@ def test_update_delivery_pipeline_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_delivery_pipeline_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.UpdateDeliveryPipelineRequest +def test_get_target_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.GetTargetRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19783,11 +23333,7 @@ def test_update_delivery_pipeline_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "delivery_pipeline": { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } - } + request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -19799,10 +23345,10 @@ def test_update_delivery_pipeline_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_delivery_pipeline(request) + client.get_target(request) -def test_update_delivery_pipeline_rest_flattened(): +def test_get_target_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19811,43 +23357,39 @@ def test_update_delivery_pipeline_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.Target() # get arguments that satisfy an http rule for this method - sample_request = { - "delivery_pipeline": { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } - } + sample_request = {"name": "projects/sample1/locations/sample2/targets/sample3"} # get truthy value for each flattened field mock_args = dict( - delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.Target.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_delivery_pipeline(**mock_args) + client.get_target(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{delivery_pipeline.name=projects/*/locations/*/deliveryPipelines/*}" - % client.transport._host, + "%s/v1/{name=projects/*/locations/*/targets/*}" % client.transport._host, args[1], ) -def test_update_delivery_pipeline_rest_flattened_error(transport: str = "rest"): +def test_get_target_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19856,14 +23398,13 @@ def test_update_delivery_pipeline_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_delivery_pipeline( - cloud_deploy.UpdateDeliveryPipelineRequest(), - delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_target( + cloud_deploy.GetTargetRequest(), + name="name_value", ) -def test_update_delivery_pipeline_rest_error(): +def test_get_target_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -19872,20 +23413,126 @@ def test_update_delivery_pipeline_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.DeleteDeliveryPipelineRequest, + cloud_deploy.CreateTargetRequest, dict, ], ) -def test_delete_delivery_pipeline_rest(request_type): +def test_create_target_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["target"] = { + "name": "name_value", + "target_id": "target_id_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "require_approval": True, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "gke": { + "cluster": "cluster_value", + "internal_ip": True, + "proxy_url": "proxy_url_value", + }, + "anthos_cluster": {"membership": "membership_value"}, + "run": {"location": "location_value"}, + "multi_target": {"target_ids": ["target_ids_value1", "target_ids_value2"]}, + "custom_target": {"custom_target_type": "custom_target_type_value"}, + "etag": "etag_value", + "execution_configs": [ + { + "usages": [1], + "default_pool": { + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "private_pool": { + "worker_pool": 
"worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "worker_pool": "worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + "execution_timeout": {"seconds": 751, "nanos": 543}, + "verbose": True, + } + ], + "deploy_parameters": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_deploy.CreateTargetRequest.meta.fields["target"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["target"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + 
result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["target"][field])): + del request_init["target"][field][i][subfield] + else: + del request_init["target"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -19900,13 +23547,13 @@ def test_delete_delivery_pipeline_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_delivery_pipeline(request) + response = client.create_target(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_delete_delivery_pipeline_rest_use_cached_wrapped_rpc(): +def test_create_target_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19920,22 +23567,17 @@ def test_delete_delivery_pipeline_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_delivery_pipeline - in client._transport._wrapped_methods - ) + assert client._transport.create_target in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_delivery_pipeline - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_target] = mock_rpc request = {} - client.delete_delivery_pipeline(request) + client.create_target(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -19944,20 +23586,21 @@ def test_delete_delivery_pipeline_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_delivery_pipeline(request) + client.create_target(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_delivery_pipeline_rest_required_fields( - request_type=cloud_deploy.DeleteDeliveryPipelineRequest, +def test_create_target_rest_required_fields( + request_type=cloud_deploy.CreateTargetRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" + request_init["target_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -19965,34 +23608,38 @@ def test_delete_delivery_pipeline_rest_required_fields( ) # verify fields with default values are dropped + assert "targetId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_delivery_pipeline._get_unset_required_fields(jsonified_request) + ).create_target._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "targetId" in jsonified_request + assert jsonified_request["targetId"] == request_init["target_id"] - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" + jsonified_request["targetId"] = "target_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_delivery_pipeline._get_unset_required_fields(jsonified_request) + ).create_target._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "allow_missing", - "etag", - "force", "request_id", + "target_id", "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "targetId" in jsonified_request + assert jsonified_request["targetId"] == "target_id_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20013,9 +23660,10 @@ def test_delete_delivery_pipeline_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -20025,35 +23673,45 @@ def test_delete_delivery_pipeline_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_delivery_pipeline(request) + response = client.create_target(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "targetId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_delivery_pipeline_rest_unset_required_fields(): +def test_create_target_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_delivery_pipeline._get_unset_required_fields({}) + unset_fields = transport.create_target._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "allowMissing", - "etag", - "force", "requestId", + "targetId", "validateOnly", ) ) - & set(("name",)) + & set( + ( + "parent", + 
"targetId", + "target", + ) + ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_delivery_pipeline_rest_interceptors(null_interceptor): +def test_create_target_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20068,14 +23726,14 @@ def test_delete_delivery_pipeline_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_delete_delivery_pipeline" + transports.CloudDeployRestInterceptor, "post_create_target" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_delete_delivery_pipeline" + transports.CloudDeployRestInterceptor, "pre_create_target" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.DeleteDeliveryPipelineRequest.pb( - cloud_deploy.DeleteDeliveryPipelineRequest() + pb_message = cloud_deploy.CreateTargetRequest.pb( + cloud_deploy.CreateTargetRequest() ) transcode.return_value = { "method": "post", @@ -20091,7 +23749,7 @@ def test_delete_delivery_pipeline_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = cloud_deploy.DeleteDeliveryPipelineRequest() + request = cloud_deploy.CreateTargetRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -20099,7 +23757,7 @@ def test_delete_delivery_pipeline_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_delivery_pipeline( + client.create_target( request, metadata=[ ("key", "val"), @@ -20111,8 +23769,8 @@ def test_delete_delivery_pipeline_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_delivery_pipeline_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.DeleteDeliveryPipelineRequest +def 
test_create_target_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.CreateTargetRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20120,9 +23778,7 @@ def test_delete_delivery_pipeline_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20134,10 +23790,10 @@ def test_delete_delivery_pipeline_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_delivery_pipeline(request) + client.create_target(request) -def test_delete_delivery_pipeline_rest_flattened(): +def test_create_target_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20149,13 +23805,13 @@ def test_delete_delivery_pipeline_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + target=cloud_deploy.Target(name="name_value"), + target_id="target_id_value", ) mock_args.update(sample_request) @@ -20166,20 +23822,19 @@ def test_delete_delivery_pipeline_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_delivery_pipeline(**mock_args) + client.create_target(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*}" - % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/targets" % client.transport._host, args[1], ) -def test_delete_delivery_pipeline_rest_flattened_error(transport: str = "rest"): +def test_create_target_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20188,13 +23843,15 @@ def test_delete_delivery_pipeline_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_delivery_pipeline( - cloud_deploy.DeleteDeliveryPipelineRequest(), - name="name_value", + client.create_target( + cloud_deploy.CreateTargetRequest(), + parent="parent_value", + target=cloud_deploy.Target(name="name_value"), + target_id="target_id_value", ) -def test_delete_delivery_pipeline_rest_error(): +def test_create_target_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -20203,46 +23860,149 @@ def test_delete_delivery_pipeline_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ListTargetsRequest, + cloud_deploy.UpdateTargetRequest, dict, ], ) -def test_list_targets_rest(request_type): +def test_update_target_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "target": {"name": "projects/sample1/locations/sample2/targets/sample3"} + } + request_init["target"] = { + "name": "projects/sample1/locations/sample2/targets/sample3", + "target_id": "target_id_value", + "uid": "uid_value", + "description": "description_value", + 
"annotations": {}, + "labels": {}, + "require_approval": True, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "gke": { + "cluster": "cluster_value", + "internal_ip": True, + "proxy_url": "proxy_url_value", + }, + "anthos_cluster": {"membership": "membership_value"}, + "run": {"location": "location_value"}, + "multi_target": {"target_ids": ["target_ids_value1", "target_ids_value2"]}, + "custom_target": {"custom_target_type": "custom_target_type_value"}, + "etag": "etag_value", + "execution_configs": [ + { + "usages": [1], + "default_pool": { + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "private_pool": { + "worker_pool": "worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "worker_pool": "worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + "execution_timeout": {"seconds": 751, "nanos": 543}, + "verbose": True, + } + ], + "deploy_parameters": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_deploy.UpdateTargetRequest.meta.fields["target"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["target"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["target"][field])): + del request_init["target"][field][i][subfield] + else: + del 
request_init["target"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListTargetsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.ListTargetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_targets(request) + response = client.update_target(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTargetsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert response.operation.name == "operations/spam" -def test_list_targets_rest_use_cached_wrapped_rpc(): +def test_update_target_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -20256,35 +24016,38 @@ def test_list_targets_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_targets in client._transport._wrapped_methods + assert client._transport.update_target in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.list_targets] = mock_rpc + client._transport._wrapped_methods[client._transport.update_target] = mock_rpc request = {} - client.list_targets(request) + client.update_target(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_targets(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_target(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_targets_rest_required_fields( - request_type=cloud_deploy.ListTargetsRequest, +def test_update_target_rest_required_fields( + request_type=cloud_deploy.UpdateTargetRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -20295,30 +24058,26 @@ def test_list_targets_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_targets._get_unset_required_fields(jsonified_request) + ).update_target._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_targets._get_unset_required_fields(jsonified_request) + ).update_target._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "filter", - "order_by", - "page_size", - "page_token", + "allow_missing", + "request_id", + "update_mask", + "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20327,7 +24086,7 @@ def test_list_targets_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListTargetsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -20339,49 +24098,52 @@ def test_list_targets_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = cloud_deploy.ListTargetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_targets(request) + response = client.update_target(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_targets_rest_unset_required_fields(): +def test_update_target_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = 
transport.list_targets._get_unset_required_fields({}) + unset_fields = transport.update_target._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "allowMissing", + "requestId", + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "updateMask", + "target", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_targets_rest_interceptors(null_interceptor): +def test_update_target_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20394,14 +24156,16 @@ def test_list_targets_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_list_targets" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_update_target" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_list_targets" + transports.CloudDeployRestInterceptor, "pre_update_target" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.ListTargetsRequest.pb( - cloud_deploy.ListTargetsRequest() + pb_message = cloud_deploy.UpdateTargetRequest.pb( + cloud_deploy.UpdateTargetRequest() ) transcode.return_value = { "method": "post", @@ -20413,19 +24177,19 @@ def test_list_targets_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_deploy.ListTargetsResponse.to_json( - cloud_deploy.ListTargetsResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = cloud_deploy.ListTargetsRequest() + request = cloud_deploy.UpdateTargetRequest() metadata = [ ("key", "val"), ("cephalopod", 
"squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.ListTargetsResponse() + post.return_value = operations_pb2.Operation() - client.list_targets( + client.update_target( request, metadata=[ ("key", "val"), @@ -20437,8 +24201,8 @@ def test_list_targets_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_targets_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.ListTargetsRequest +def test_update_target_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.UpdateTargetRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20446,7 +24210,9 @@ def test_list_targets_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "target": {"name": "projects/sample1/locations/sample2/targets/sample3"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20458,10 +24224,10 @@ def test_list_targets_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_targets(request) + client.update_target(request) -def test_list_targets_rest_flattened(): +def test_update_target_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20470,39 +24236,41 @@ def test_list_targets_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = cloud_deploy.ListTargetsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "target": {"name": "projects/sample1/locations/sample2/targets/sample3"} + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + target=cloud_deploy.Target(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.ListTargetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_targets(**mock_args) + client.update_target(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/targets" % client.transport._host, + "%s/v1/{target.name=projects/*/locations/*/targets/*}" + % client.transport._host, args[1], ) -def test_list_targets_rest_flattened_error(transport: str = "rest"): +def test_update_target_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20511,113 +24279,55 @@ def test_list_targets_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_targets( - cloud_deploy.ListTargetsRequest(), - parent="parent_value", + client.update_target( + cloud_deploy.UpdateTargetRequest(), + target=cloud_deploy.Target(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_list_targets_rest_pager(transport: str = "rest"): +def test_update_target_rest_error(): client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - cloud_deploy.ListTargetsResponse( - targets=[ - cloud_deploy.Target(), - cloud_deploy.Target(), - cloud_deploy.Target(), - ], - next_page_token="abc", - ), - cloud_deploy.ListTargetsResponse( - targets=[], - next_page_token="def", - ), - cloud_deploy.ListTargetsResponse( - targets=[ - cloud_deploy.Target(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListTargetsResponse( - targets=[ - cloud_deploy.Target(), - cloud_deploy.Target(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(cloud_deploy.ListTargetsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_targets(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, 
cloud_deploy.Target) for i in results) - - pages = list(client.list_targets(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - cloud_deploy.RollbackTargetRequest, + cloud_deploy.DeleteTargetRequest, dict, ], ) -def test_rollback_target_rest(request_type): +def test_delete_target_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.RollbackTargetResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.RollbackTargetResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.rollback_target(request) + response = client.delete_target(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.RollbackTargetResponse) + assert response.operation.name == "operations/spam" -def test_rollback_target_rest_use_cached_wrapped_rpc(): +def test_delete_target_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -20631,37 +24341,39 @@ def test_rollback_target_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.rollback_target in client._transport._wrapped_methods + assert client._transport.delete_target in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.rollback_target] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_target] = mock_rpc request = {} - client.rollback_target(request) + client.delete_target(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.rollback_target(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_target(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_rollback_target_rest_required_fields( - request_type=cloud_deploy.RollbackTargetRequest, +def test_delete_target_rest_required_fields( + request_type=cloud_deploy.DeleteTargetRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} request_init["name"] = "" - request_init["target_id"] = "" - request_init["rollout_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -20672,27 +24384,30 @@ def test_rollback_target_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).rollback_target._get_unset_required_fields(jsonified_request) + ).delete_target._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = "name_value" - jsonified_request["targetId"] = "target_id_value" - jsonified_request["rolloutId"] = "rollout_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).rollback_target._get_unset_required_fields(jsonified_request) + ).delete_target._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "allow_missing", + "etag", + "request_id", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request assert jsonified_request["name"] == "name_value" - assert "targetId" in jsonified_request - assert jsonified_request["targetId"] == "target_id_value" - assert "rolloutId" in jsonified_request - assert jsonified_request["rolloutId"] == "rollout_id_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20701,7 +24416,7 @@ def test_rollback_target_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.RollbackTargetResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -20713,49 +24428,46 @@ def test_rollback_target_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = cloud_deploy.RollbackTargetResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.rollback_target(request) + response = client.delete_target(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_rollback_target_rest_unset_required_fields(): +def 
test_delete_target_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.rollback_target._get_unset_required_fields({}) + unset_fields = transport.delete_target._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) - & set( + set( ( - "name", - "targetId", - "rolloutId", + "allowMissing", + "etag", + "requestId", + "validateOnly", ) ) + & set(("name",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_rollback_target_rest_interceptors(null_interceptor): +def test_delete_target_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20768,14 +24480,16 @@ def test_rollback_target_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_rollback_target" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_delete_target" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_rollback_target" + transports.CloudDeployRestInterceptor, "pre_delete_target" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.RollbackTargetRequest.pb( - cloud_deploy.RollbackTargetRequest() + pb_message = cloud_deploy.DeleteTargetRequest.pb( + cloud_deploy.DeleteTargetRequest() ) transcode.return_value = { "method": "post", @@ -20787,19 +24501,19 @@ def test_rollback_target_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_deploy.RollbackTargetResponse.to_json( - cloud_deploy.RollbackTargetResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() 
) - request = cloud_deploy.RollbackTargetRequest() + request = cloud_deploy.DeleteTargetRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.RollbackTargetResponse() + post.return_value = operations_pb2.Operation() - client.rollback_target( + client.delete_target( request, metadata=[ ("key", "val"), @@ -20811,8 +24525,8 @@ def test_rollback_target_rest_interceptors(null_interceptor): post.assert_called_once() -def test_rollback_target_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.RollbackTargetRequest +def test_delete_target_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.DeleteTargetRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20820,9 +24534,7 @@ def test_rollback_target_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20834,10 +24546,10 @@ def test_rollback_target_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.rollback_target(request) + client.delete_target(request) -def test_rollback_target_rest_flattened(): +def test_delete_target_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20846,44 +24558,37 @@ def test_rollback_target_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = cloud_deploy.RollbackTargetResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + sample_request = {"name": "projects/sample1/locations/sample2/targets/sample3"} # get truthy value for each flattened field mock_args = dict( name="name_value", - target_id="target_id_value", - rollout_id="rollout_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.RollbackTargetResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.rollback_target(**mock_args) + client.delete_target(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*}:rollbackTarget" - % client.transport._host, + "%s/v1/{name=projects/*/locations/*/targets/*}" % client.transport._host, args[1], ) -def test_rollback_target_rest_flattened_error(transport: str = "rest"): +def test_delete_target_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20892,15 +24597,13 @@ def test_rollback_target_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.rollback_target( - cloud_deploy.RollbackTargetRequest(), + client.delete_target( + cloud_deploy.DeleteTargetRequest(), name="name_value", - target_id="target_id_value", - rollout_id="rollout_id_value", ) -def test_rollback_target_rest_error(): +def test_delete_target_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -20909,54 +24612,46 @@ def test_rollback_target_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetTargetRequest, + cloud_deploy.ListCustomTargetTypesRequest, dict, ], ) -def test_get_target_rest(request_type): +def test_list_custom_target_types_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = cloud_deploy.Target( - name="name_value", - target_id="target_id_value", - uid="uid_value", - description="description_value", - require_approval=True, - etag="etag_value", + return_value = cloud_deploy.ListCustomTargetTypesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.Target.pb(return_value) + return_value = cloud_deploy.ListCustomTargetTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_target(request) + response = client.list_custom_target_types(request) # Establish that the response is the type that we expect. - assert isinstance(response, cloud_deploy.Target) - assert response.name == "name_value" - assert response.target_id == "target_id_value" - assert response.uid == "uid_value" - assert response.description == "description_value" - assert response.require_approval is True - assert response.etag == "etag_value" + assert isinstance(response, pagers.ListCustomTargetTypesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_get_target_rest_use_cached_wrapped_rpc(): +def test_list_custom_target_types_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -20970,33 +24665,40 @@ def test_get_target_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_target in client._transport._wrapped_methods + assert ( + client._transport.list_custom_target_types + in 
client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_target] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_custom_target_types + ] = mock_rpc request = {} - client.get_target(request) + client.list_custom_target_types(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_target(request) + client.list_custom_target_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_target_rest_required_fields(request_type=cloud_deploy.GetTargetRequest): +def test_list_custom_target_types_rest_required_fields( + request_type=cloud_deploy.ListCustomTargetTypesRequest, +): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -21007,21 +24709,30 @@ def test_get_target_rest_required_fields(request_type=cloud_deploy.GetTargetRequ unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_target._get_unset_required_fields(jsonified_request) + ).list_custom_target_types._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_target._get_unset_required_fields(jsonified_request) + ).list_custom_target_types._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are 
not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21030,7 +24741,7 @@ def test_get_target_rest_required_fields(request_type=cloud_deploy.GetTargetRequ request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.Target() + return_value = cloud_deploy.ListCustomTargetTypesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -21051,30 +24762,40 @@ def test_get_target_rest_required_fields(request_type=cloud_deploy.GetTargetRequ response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.Target.pb(return_value) + return_value = cloud_deploy.ListCustomTargetTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_target(request) + response = client.list_custom_target_types(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_target_rest_unset_required_fields(): +def test_list_custom_target_types_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_target._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & 
set(("name",))) + unset_fields = transport.list_custom_target_types._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_target_rest_interceptors(null_interceptor): +def test_list_custom_target_types_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21087,13 +24808,15 @@ def test_get_target_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_get_target" + transports.CloudDeployRestInterceptor, "post_list_custom_target_types" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_get_target" + transports.CloudDeployRestInterceptor, "pre_list_custom_target_types" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.GetTargetRequest.pb(cloud_deploy.GetTargetRequest()) + pb_message = cloud_deploy.ListCustomTargetTypesRequest.pb( + cloud_deploy.ListCustomTargetTypesRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -21104,17 +24827,19 @@ def test_get_target_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_deploy.Target.to_json(cloud_deploy.Target()) + req.return_value._content = cloud_deploy.ListCustomTargetTypesResponse.to_json( + cloud_deploy.ListCustomTargetTypesResponse() + ) - request = cloud_deploy.GetTargetRequest() + request = cloud_deploy.ListCustomTargetTypesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.Target() + post.return_value = 
cloud_deploy.ListCustomTargetTypesResponse() - client.get_target( + client.list_custom_target_types( request, metadata=[ ("key", "val"), @@ -21126,8 +24851,8 @@ def test_get_target_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_target_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.GetTargetRequest +def test_list_custom_target_types_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.ListCustomTargetTypesRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21135,7 +24860,7 @@ def test_get_target_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21147,10 +24872,10 @@ def test_get_target_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_target(request) + client.list_custom_target_types(request) -def test_get_target_rest_flattened(): +def test_list_custom_target_types_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -21159,14 +24884,14 @@ def test_get_target_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = cloud_deploy.Target() + return_value = cloud_deploy.ListCustomTargetTypesResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/targets/sample3"} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -21174,24 +24899,25 @@ def test_get_target_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.Target.pb(return_value) + return_value = cloud_deploy.ListCustomTargetTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_target(**mock_args) + client.list_custom_target_types(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/targets/*}" % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/customTargetTypes" + % client.transport._host, args[1], ) -def test_get_target_rest_flattened_error(transport: str = "rest"): +def test_list_custom_target_types_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -21200,158 +24926,126 @@ def test_get_target_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_target( - cloud_deploy.GetTargetRequest(), - name="name_value", + client.list_custom_target_types( + cloud_deploy.ListCustomTargetTypesRequest(), + parent="parent_value", ) -def test_get_target_rest_error(): +def test_list_custom_target_types_rest_pager(transport: str = "rest"): client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloud_deploy.ListCustomTargetTypesResponse( + custom_target_types=[ + cloud_deploy.CustomTargetType(), + cloud_deploy.CustomTargetType(), + cloud_deploy.CustomTargetType(), + ], + next_page_token="abc", + ), + cloud_deploy.ListCustomTargetTypesResponse( + custom_target_types=[], + next_page_token="def", + ), + cloud_deploy.ListCustomTargetTypesResponse( + custom_target_types=[ + cloud_deploy.CustomTargetType(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListCustomTargetTypesResponse( + custom_target_types=[ + cloud_deploy.CustomTargetType(), + cloud_deploy.CustomTargetType(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + cloud_deploy.ListCustomTargetTypesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = 
client.list_custom_target_types(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.CustomTargetType) for i in results) + + pages = list(client.list_custom_target_types(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - cloud_deploy.CreateTargetRequest, + cloud_deploy.GetCustomTargetTypeRequest, dict, ], ) -def test_create_target_rest(request_type): +def test_get_custom_target_type_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["target"] = { - "name": "name_value", - "target_id": "target_id_value", - "uid": "uid_value", - "description": "description_value", - "annotations": {}, - "labels": {}, - "require_approval": True, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "gke": {"cluster": "cluster_value", "internal_ip": True}, - "anthos_cluster": {"membership": "membership_value"}, - "run": {"location": "location_value"}, - "multi_target": {"target_ids": ["target_ids_value1", "target_ids_value2"]}, - "custom_target": {"custom_target_type": "custom_target_type_value"}, - "etag": "etag_value", - "execution_configs": [ - { - "usages": [1], - "default_pool": { - "service_account": "service_account_value", - "artifact_storage": "artifact_storage_value", - }, - "private_pool": { - "worker_pool": "worker_pool_value", - "service_account": "service_account_value", - "artifact_storage": "artifact_storage_value", - }, - "worker_pool": "worker_pool_value", - "service_account": "service_account_value", - "artifact_storage": "artifact_storage_value", - "execution_timeout": {"seconds": 751, "nanos": 543}, - "verbose": True, - } - ], - "deploy_parameters": {}, + 
request_init = { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = cloud_deploy.CreateTargetRequest.meta.fields["target"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["target"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( 
- { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["target"][field])): - del request_init["target"][field][i][subfield] - else: - del request_init["target"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.CustomTargetType( + name="name_value", + custom_target_type_id="custom_target_type_id_value", + uid="uid_value", + description="description_value", + etag="etag_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.CustomTargetType.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_target(request) + response = client.get_custom_target_type(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, cloud_deploy.CustomTargetType) + assert response.name == "name_value" + assert response.custom_target_type_id == "custom_target_type_id_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.etag == "etag_value" -def test_create_target_rest_use_cached_wrapped_rpc(): +def test_get_custom_target_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -21365,40 +25059,40 @@ def test_create_target_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_target in client._transport._wrapped_methods + assert ( + client._transport.get_custom_target_type + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_target] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_custom_target_type + ] = mock_rpc request = {} - client.create_target(request) + client.get_custom_target_type(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_target(request) + client.get_custom_target_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_target_rest_required_fields( - request_type=cloud_deploy.CreateTargetRequest, +def test_get_custom_target_type_rest_required_fields( + request_type=cloud_deploy.GetCustomTargetTypeRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["parent"] = "" - request_init["target_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -21406,38 +25100,24 @@ def test_create_target_rest_required_fields( ) # verify fields with default values are dropped - assert "targetId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_target._get_unset_required_fields(jsonified_request) + ).get_custom_target_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "targetId" in jsonified_request - assert jsonified_request["targetId"] == request_init["target_id"] - jsonified_request["parent"] = "parent_value" - jsonified_request["targetId"] = "target_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_target._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "request_id", - "target_id", - "validate_only", - ) - ) + ).get_custom_target_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "targetId" in jsonified_request - assert jsonified_request["targetId"] == "target_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21446,7 +25126,7 @@ def test_create_target_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.CustomTargetType() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -21458,58 +25138,39 @@ def test_create_target_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_deploy.CustomTargetType.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_target(request) + response = client.get_custom_target_type(request) - expected_params = [ - ( - "targetId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = 
req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_target_rest_unset_required_fields(): +def test_get_custom_target_type_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_target._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "requestId", - "targetId", - "validateOnly", - ) - ) - & set( - ( - "parent", - "targetId", - "target", - ) - ) - ) + unset_fields = transport.get_custom_target_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_target_rest_interceptors(null_interceptor): +def test_get_custom_target_type_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21522,16 +25183,14 @@ def test_create_target_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_create_target" + transports.CloudDeployRestInterceptor, "post_get_custom_target_type" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_create_target" + transports.CloudDeployRestInterceptor, "pre_get_custom_target_type" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.CreateTargetRequest.pb( - cloud_deploy.CreateTargetRequest() + pb_message = cloud_deploy.GetCustomTargetTypeRequest.pb( + cloud_deploy.GetCustomTargetTypeRequest() ) transcode.return_value = { "method": "post", @@ -21543,19 +25202,19 @@ def test_create_target_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = 
PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = cloud_deploy.CustomTargetType.to_json( + cloud_deploy.CustomTargetType() ) - request = cloud_deploy.CreateTargetRequest() + request = cloud_deploy.GetCustomTargetTypeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = cloud_deploy.CustomTargetType() - client.create_target( + client.get_custom_target_type( request, metadata=[ ("key", "val"), @@ -21567,8 +25226,8 @@ def test_create_target_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_target_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.CreateTargetRequest +def test_get_custom_target_type_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.GetCustomTargetTypeRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21576,7 +25235,9 @@ def test_create_target_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21588,10 +25249,10 @@ def test_create_target_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_target(request) + client.get_custom_target_type(request) -def test_create_target_rest_flattened(): +def test_get_custom_target_type_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -21600,39 +25261,42 @@ def test_create_target_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.CustomTargetType() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - target=cloud_deploy.Target(name="name_value"), - target_id="target_id_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.CustomTargetType.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_target(**mock_args) + client.get_custom_target_type(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/targets" % client.transport._host, + "%s/v1/{name=projects/*/locations/*/customTargetTypes/*}" + % client.transport._host, args[1], ) -def test_create_target_rest_flattened_error(transport: str = "rest"): +def test_get_custom_target_type_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -21641,15 +25305,13 @@ def test_create_target_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_target( - cloud_deploy.CreateTargetRequest(), - parent="parent_value", - target=cloud_deploy.Target(name="name_value"), - target_id="target_id_value", + client.get_custom_target_type( + cloud_deploy.GetCustomTargetTypeRequest(), + name="name_value", ) -def test_create_target_rest_error(): +def test_get_custom_target_type_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -21658,63 +25320,60 @@ def test_create_target_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.UpdateTargetRequest, + cloud_deploy.CreateCustomTargetTypeRequest, dict, ], ) -def test_update_target_rest(request_type): +def test_create_custom_target_type_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "target": {"name": "projects/sample1/locations/sample2/targets/sample3"} - } - request_init["target"] = { - "name": "projects/sample1/locations/sample2/targets/sample3", - "target_id": "target_id_value", + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["custom_target_type"] = { + "name": "name_value", + "custom_target_type_id": "custom_target_type_id_value", "uid": "uid_value", "description": "description_value", "annotations": {}, "labels": {}, - "require_approval": True, "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, - "gke": {"cluster": "cluster_value", "internal_ip": True}, - "anthos_cluster": {"membership": "membership_value"}, - "run": {"location": "location_value"}, - "multi_target": {"target_ids": ["target_ids_value1", "target_ids_value2"]}, - "custom_target": {"custom_target_type": "custom_target_type_value"}, "etag": "etag_value", - "execution_configs": [ - { - "usages": [1], - "default_pool": { - "service_account": "service_account_value", - "artifact_storage": 
"artifact_storage_value", - }, - "private_pool": { - "worker_pool": "worker_pool_value", - "service_account": "service_account_value", - "artifact_storage": "artifact_storage_value", - }, - "worker_pool": "worker_pool_value", - "service_account": "service_account_value", - "artifact_storage": "artifact_storage_value", - "execution_timeout": {"seconds": 751, "nanos": 543}, - "verbose": True, - } - ], - "deploy_parameters": {}, + "custom_actions": { + "render_action": "render_action_value", + "deploy_action": "deploy_action_value", + "include_skaffold_modules": [ + { + "configs": ["configs_value1", "configs_value2"], + "git": { + "repo": "repo_value", + "path": "path_value", + "ref": "ref_value", + }, + "google_cloud_storage": { + "source": "source_value", + "path": "path_value", + }, + "google_cloud_build_repo": { + "repository": "repository_value", + "path": "path_value", + "ref": "ref_value", + }, + } + ], + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = cloud_deploy.UpdateTargetRequest.meta.fields["target"] + test_field = cloud_deploy.CreateCustomTargetTypeRequest.meta.fields[ + "custom_target_type" + ] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -21742,7 +25401,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["target"].items(): # pragma: NO COVER + for field, value in request_init["custom_target_type"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -21772,10 +25431,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["target"][field])): - del request_init["target"][field][i][subfield] + for i in range(0, len(request_init["custom_target_type"][field])): + del request_init["custom_target_type"][field][i][subfield] else: - del request_init["target"][field][subfield] + del request_init["custom_target_type"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -21790,13 +25449,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_target(request) + response = client.create_custom_target_type(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_update_target_rest_use_cached_wrapped_rpc(): +def test_create_custom_target_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -21810,17 +25469,22 @@ def test_update_target_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_target in client._transport._wrapped_methods + assert ( + client._transport.create_custom_target_type + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_target] = mock_rpc + client._transport._wrapped_methods[ + client._transport.create_custom_target_type + ] = mock_rpc request = {} - client.update_target(request) + client.create_custom_target_type(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -21829,19 +25493,21 @@ def test_update_target_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_target(request) + client.create_custom_target_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_target_rest_required_fields( - request_type=cloud_deploy.UpdateTargetRequest, +def test_create_custom_target_type_rest_required_fields( + request_type=cloud_deploy.CreateCustomTargetTypeRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} + request_init["parent"] = "" + request_init["custom_target_type_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -21849,29 +25515,40 @@ def test_update_target_rest_required_fields( ) # verify fields with default values are dropped + assert "customTargetTypeId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_target._get_unset_required_fields(jsonified_request) + ).create_custom_target_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "customTargetTypeId" in jsonified_request + assert ( + jsonified_request["customTargetTypeId"] == request_init["custom_target_type_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["customTargetTypeId"] = "custom_target_type_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_target._get_unset_required_fields(jsonified_request) + ).create_custom_target_type._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "allow_missing", + "custom_target_type_id", "request_id", - "update_mask", "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "customTargetTypeId" in jsonified_request + assert jsonified_request["customTargetTypeId"] == "custom_target_type_id_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21892,7 +25569,7 @@ def test_update_target_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -21905,39 +25582,45 @@ def test_update_target_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_target(request) + response = client.create_custom_target_type(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "customTargetTypeId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_target_rest_unset_required_fields(): +def test_create_custom_target_type_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_target._get_unset_required_fields({}) + unset_fields = transport.create_custom_target_type._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "allowMissing", + "customTargetTypeId", "requestId", - "updateMask", "validateOnly", ) ) & set( ( - "updateMask", - "target", + "parent", + "customTargetTypeId", + "customTargetType", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def 
test_update_target_rest_interceptors(null_interceptor): +def test_create_custom_target_type_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21952,14 +25635,14 @@ def test_update_target_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_update_target" + transports.CloudDeployRestInterceptor, "post_create_custom_target_type" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_update_target" + transports.CloudDeployRestInterceptor, "pre_create_custom_target_type" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.UpdateTargetRequest.pb( - cloud_deploy.UpdateTargetRequest() + pb_message = cloud_deploy.CreateCustomTargetTypeRequest.pb( + cloud_deploy.CreateCustomTargetTypeRequest() ) transcode.return_value = { "method": "post", @@ -21975,7 +25658,7 @@ def test_update_target_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = cloud_deploy.UpdateTargetRequest() + request = cloud_deploy.CreateCustomTargetTypeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -21983,7 +25666,7 @@ def test_update_target_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.update_target( + client.create_custom_target_type( request, metadata=[ ("key", "val"), @@ -21995,8 +25678,8 @@ def test_update_target_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_target_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.UpdateTargetRequest +def test_create_custom_target_type_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.CreateCustomTargetTypeRequest ): client = CloudDeployClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -22004,9 +25687,7 @@ def test_update_target_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "target": {"name": "projects/sample1/locations/sample2/targets/sample3"} - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -22018,10 +25699,10 @@ def test_update_target_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_target(request) + client.create_custom_target_type(request) -def test_update_target_rest_flattened(): +def test_create_custom_target_type_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22033,14 +25714,13 @@ def test_update_target_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "target": {"name": "projects/sample1/locations/sample2/targets/sample3"} - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - target=cloud_deploy.Target(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", + custom_target_type=cloud_deploy.CustomTargetType(name="name_value"), + custom_target_type_id="custom_target_type_id_value", ) mock_args.update(sample_request) @@ -22051,20 +25731,20 @@ def test_update_target_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_target(**mock_args) + client.create_custom_target_type(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{target.name=projects/*/locations/*/targets/*}" + "%s/v1/{parent=projects/*/locations/*}/customTargetTypes" % client.transport._host, args[1], ) -def test_update_target_rest_flattened_error(transport: str = "rest"): +def test_create_custom_target_type_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -22073,14 +25753,15 @@ def test_update_target_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_target( - cloud_deploy.UpdateTargetRequest(), - target=cloud_deploy.Target(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.create_custom_target_type( + cloud_deploy.CreateCustomTargetTypeRequest(), + parent="parent_value", + custom_target_type=cloud_deploy.CustomTargetType(name="name_value"), + custom_target_type_id="custom_target_type_id_value", ) -def test_update_target_rest_error(): +def test_create_custom_target_type_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -22089,18 +25770,125 @@ def test_update_target_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.DeleteTargetRequest, + cloud_deploy.UpdateCustomTargetTypeRequest, dict, ], ) -def test_delete_target_rest(request_type): +def test_update_custom_target_type_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} + request_init = { + "custom_target_type": { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + } + } + 
request_init["custom_target_type"] = { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3", + "custom_target_type_id": "custom_target_type_id_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "etag": "etag_value", + "custom_actions": { + "render_action": "render_action_value", + "deploy_action": "deploy_action_value", + "include_skaffold_modules": [ + { + "configs": ["configs_value1", "configs_value2"], + "git": { + "repo": "repo_value", + "path": "path_value", + "ref": "ref_value", + }, + "google_cloud_storage": { + "source": "source_value", + "path": "path_value", + }, + "google_cloud_build_repo": { + "repository": "repository_value", + "path": "path_value", + "ref": "ref_value", + }, + } + ], + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_deploy.UpdateCustomTargetTypeRequest.meta.fields[ + "custom_target_type" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["custom_target_type"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["custom_target_type"][field])): + del 
request_init["custom_target_type"][field][i][subfield] + else: + del request_init["custom_target_type"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -22115,13 +25903,13 @@ def test_delete_target_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_target(request) + response = client.update_custom_target_type(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_delete_target_rest_use_cached_wrapped_rpc(): +def test_update_custom_target_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -22135,17 +25923,22 @@ def test_delete_target_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_target in client._transport._wrapped_methods + assert ( + client._transport.update_custom_target_type + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_target] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_custom_target_type + ] = mock_rpc request = {} - client.delete_target(request) + client.update_custom_target_type(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -22154,20 +25947,19 @@ def test_delete_target_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_target(request) + client.update_custom_target_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_target_rest_required_fields( - request_type=cloud_deploy.DeleteTargetRequest, +def test_update_custom_target_type_rest_required_fields( + request_type=cloud_deploy.UpdateCustomTargetTypeRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -22178,30 +25970,26 @@ def test_delete_target_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_target._get_unset_required_fields(jsonified_request) + ).update_custom_target_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_target._get_unset_required_fields(jsonified_request) + ).update_custom_target_type._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( "allow_missing", - "etag", "request_id", + "update_mask", "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22222,9 +26010,10 @@ def test_delete_target_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -22234,34 +26023,39 @@ def test_delete_target_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_target(request) + response = client.update_custom_target_type(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_target_rest_unset_required_fields(): +def test_update_custom_target_type_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_target._get_unset_required_fields({}) + unset_fields = transport.update_custom_target_type._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( "allowMissing", - "etag", "requestId", + "updateMask", "validateOnly", ) ) - & set(("name",)) + & set( + ( + "updateMask", + "customTargetType", + ) + ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_target_rest_interceptors(null_interceptor): +def test_update_custom_target_type_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), 
interceptor=None @@ -22276,14 +26070,14 @@ def test_delete_target_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_delete_target" + transports.CloudDeployRestInterceptor, "post_update_custom_target_type" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_delete_target" + transports.CloudDeployRestInterceptor, "pre_update_custom_target_type" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.DeleteTargetRequest.pb( - cloud_deploy.DeleteTargetRequest() + pb_message = cloud_deploy.UpdateCustomTargetTypeRequest.pb( + cloud_deploy.UpdateCustomTargetTypeRequest() ) transcode.return_value = { "method": "post", @@ -22299,7 +26093,7 @@ def test_delete_target_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = cloud_deploy.DeleteTargetRequest() + request = cloud_deploy.UpdateCustomTargetTypeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -22307,7 +26101,7 @@ def test_delete_target_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_target( + client.update_custom_target_type( request, metadata=[ ("key", "val"), @@ -22319,8 +26113,8 @@ def test_delete_target_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_target_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.DeleteTargetRequest +def test_update_custom_target_type_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.UpdateCustomTargetTypeRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22328,7 +26122,11 @@ def test_delete_target_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} + 
request_init = { + "custom_target_type": { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -22340,10 +26138,10 @@ def test_delete_target_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_target(request) + client.update_custom_target_type(request) -def test_delete_target_rest_flattened(): +def test_update_custom_target_type_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22355,11 +26153,16 @@ def test_delete_target_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/targets/sample3"} + sample_request = { + "custom_target_type": { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + } + } # get truthy value for each flattened field mock_args = dict( - name="name_value", + custom_target_type=cloud_deploy.CustomTargetType(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -22370,19 +26173,20 @@ def test_delete_target_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_target(**mock_args) + client.update_custom_target_type(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/targets/*}" % client.transport._host, + "%s/v1/{custom_target_type.name=projects/*/locations/*/customTargetTypes/*}" + % client.transport._host, args[1], ) -def test_delete_target_rest_flattened_error(transport: str = "rest"): +def test_update_custom_target_type_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -22391,13 +26195,14 @@ def test_delete_target_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_target( - cloud_deploy.DeleteTargetRequest(), - name="name_value", + client.update_custom_target_type( + cloud_deploy.UpdateCustomTargetTypeRequest(), + custom_target_type=cloud_deploy.CustomTargetType(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_delete_target_rest_error(): +def test_update_custom_target_type_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -22406,46 +26211,41 @@ def test_delete_target_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ListCustomTargetTypesRequest, + cloud_deploy.DeleteCustomTargetTypeRequest, dict, ], ) -def test_list_custom_target_types_rest(request_type): +def test_delete_custom_target_type_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListCustomTargetTypesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.ListCustomTargetTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_custom_target_types(request) + response = client.delete_custom_target_type(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListCustomTargetTypesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert response.operation.name == "operations/spam" -def test_list_custom_target_types_rest_use_cached_wrapped_rpc(): +def test_delete_custom_target_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -22460,7 +26260,7 @@ def test_list_custom_target_types_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_custom_target_types + client._transport.delete_custom_target_type in client._transport._wrapped_methods ) @@ -22470,29 +26270,33 @@ def test_list_custom_target_types_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.list_custom_target_types + client._transport.delete_custom_target_type ] = mock_rpc request = {} - client.list_custom_target_types(request) + client.delete_custom_target_type(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_custom_target_types(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_custom_target_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_custom_target_types_rest_required_fields( - request_type=cloud_deploy.ListCustomTargetTypesRequest, +def test_delete_custom_target_type_rest_required_fields( + request_type=cloud_deploy.DeleteCustomTargetTypeRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -22503,30 +26307,30 @@ def test_list_custom_target_types_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_custom_target_types._get_unset_required_fields(jsonified_request) + ).delete_custom_target_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_custom_target_types._get_unset_required_fields(jsonified_request) + ).delete_custom_target_type._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "filter", - "order_by", - "page_size", - "page_token", + "allow_missing", + "etag", + "request_id", + "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22535,7 +26339,7 @@ def test_list_custom_target_types_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListCustomTargetTypesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -22547,49 +26351,46 @@ def test_list_custom_target_types_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = cloud_deploy.ListCustomTargetTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_custom_target_types(request) + response = client.delete_custom_target_type(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_custom_target_types_rest_unset_required_fields(): +def 
test_delete_custom_target_type_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_custom_target_types._get_unset_required_fields({}) + unset_fields = transport.delete_custom_target_type._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "allowMissing", + "etag", + "requestId", + "validateOnly", ) ) - & set(("parent",)) + & set(("name",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_custom_target_types_rest_interceptors(null_interceptor): +def test_delete_custom_target_type_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22602,14 +26403,16 @@ def test_list_custom_target_types_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_list_custom_target_types" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_delete_custom_target_type" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_list_custom_target_types" + transports.CloudDeployRestInterceptor, "pre_delete_custom_target_type" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.ListCustomTargetTypesRequest.pb( - cloud_deploy.ListCustomTargetTypesRequest() + pb_message = cloud_deploy.DeleteCustomTargetTypeRequest.pb( + cloud_deploy.DeleteCustomTargetTypeRequest() ) transcode.return_value = { "method": "post", @@ -22621,19 +26424,19 @@ def test_list_custom_target_types_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = 
cloud_deploy.ListCustomTargetTypesResponse.to_json( - cloud_deploy.ListCustomTargetTypesResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = cloud_deploy.ListCustomTargetTypesRequest() + request = cloud_deploy.DeleteCustomTargetTypeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.ListCustomTargetTypesResponse() + post.return_value = operations_pb2.Operation() - client.list_custom_target_types( + client.delete_custom_target_type( request, metadata=[ ("key", "val"), @@ -22645,8 +26448,8 @@ def test_list_custom_target_types_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_custom_target_types_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.ListCustomTargetTypesRequest +def test_delete_custom_target_type_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.DeleteCustomTargetTypeRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22654,7 +26457,9 @@ def test_list_custom_target_types_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -22666,10 +26471,10 @@ def test_list_custom_target_types_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_custom_target_types(request) + client.delete_custom_target_type(request) -def test_list_custom_target_types_rest_flattened(): +def test_delete_custom_target_type_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22678,40 +26483,40 @@ def test_list_custom_target_types_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListCustomTargetTypesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.ListCustomTargetTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_custom_target_types(**mock_args) + client.delete_custom_target_type(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/customTargetTypes" + "%s/v1/{name=projects/*/locations/*/customTargetTypes/*}" % client.transport._host, args[1], ) -def test_list_custom_target_types_rest_flattened_error(transport: str = "rest"): +def test_delete_custom_target_type_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -22720,83 +26525,26 @@ def test_list_custom_target_types_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_custom_target_types( - cloud_deploy.ListCustomTargetTypesRequest(), - parent="parent_value", + client.delete_custom_target_type( + cloud_deploy.DeleteCustomTargetTypeRequest(), + name="name_value", ) -def test_list_custom_target_types_rest_pager(transport: str = "rest"): +def test_delete_custom_target_type_rest_error(): client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - cloud_deploy.ListCustomTargetTypesResponse( - custom_target_types=[ - cloud_deploy.CustomTargetType(), - cloud_deploy.CustomTargetType(), - cloud_deploy.CustomTargetType(), - ], - next_page_token="abc", - ), - cloud_deploy.ListCustomTargetTypesResponse( - custom_target_types=[], - next_page_token="def", - ), - cloud_deploy.ListCustomTargetTypesResponse( - custom_target_types=[ - cloud_deploy.CustomTargetType(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListCustomTargetTypesResponse( - custom_target_types=[ - cloud_deploy.CustomTargetType(), - cloud_deploy.CustomTargetType(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - cloud_deploy.ListCustomTargetTypesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_custom_target_types(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloud_deploy.CustomTargetType) for i in results) - - pages = list(client.list_custom_target_types(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetCustomTargetTypeRequest, + cloud_deploy.ListReleasesRequest, dict, ], ) -def test_get_custom_target_type_rest(request_type): +def test_list_releases_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22804,42 +26552,36 @@ def 
test_get_custom_target_type_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.CustomTargetType( - name="name_value", - custom_target_type_id="custom_target_type_id_value", - uid="uid_value", - description="description_value", - etag="etag_value", + return_value = cloud_deploy.ListReleasesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.CustomTargetType.pb(return_value) + return_value = cloud_deploy.ListReleasesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_custom_target_type(request) + response = client.list_releases(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.CustomTargetType) - assert response.name == "name_value" - assert response.custom_target_type_id == "custom_target_type_id_value" - assert response.uid == "uid_value" - assert response.description == "description_value" - assert response.etag == "etag_value" + assert isinstance(response, pagers.ListReleasesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_get_custom_target_type_rest_use_cached_wrapped_rpc(): +def test_list_releases_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -22853,40 +26595,35 @@ def test_get_custom_target_type_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.get_custom_target_type - in client._transport._wrapped_methods - ) + assert client._transport.list_releases in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_custom_target_type - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_releases] = mock_rpc request = {} - client.get_custom_target_type(request) + client.list_releases(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_custom_target_type(request) + client.list_releases(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_custom_target_type_rest_required_fields( - request_type=cloud_deploy.GetCustomTargetTypeRequest, +def test_list_releases_rest_required_fields( + request_type=cloud_deploy.ListReleasesRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -22897,21 +26634,30 @@ def test_get_custom_target_type_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_custom_target_type._get_unset_required_fields(jsonified_request) + ).list_releases._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_custom_target_type._get_unset_required_fields(jsonified_request) + ).list_releases._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22920,7 +26666,7 @@ def test_get_custom_target_type_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.CustomTargetType() + return_value = cloud_deploy.ListReleasesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -22941,30 +26687,40 @@ def test_get_custom_target_type_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.CustomTargetType.pb(return_value) + return_value = cloud_deploy.ListReleasesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_custom_target_type(request) + response = client.list_releases(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_custom_target_type_rest_unset_required_fields(): +def test_list_releases_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_custom_target_type._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = 
transport.list_releases._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_custom_target_type_rest_interceptors(null_interceptor): +def test_list_releases_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22977,14 +26733,14 @@ def test_get_custom_target_type_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_get_custom_target_type" + transports.CloudDeployRestInterceptor, "post_list_releases" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_get_custom_target_type" + transports.CloudDeployRestInterceptor, "pre_list_releases" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.GetCustomTargetTypeRequest.pb( - cloud_deploy.GetCustomTargetTypeRequest() + pb_message = cloud_deploy.ListReleasesRequest.pb( + cloud_deploy.ListReleasesRequest() ) transcode.return_value = { "method": "post", @@ -22996,19 +26752,19 @@ def test_get_custom_target_type_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_deploy.CustomTargetType.to_json( - cloud_deploy.CustomTargetType() + req.return_value._content = cloud_deploy.ListReleasesResponse.to_json( + cloud_deploy.ListReleasesResponse() ) - request = cloud_deploy.GetCustomTargetTypeRequest() + request = cloud_deploy.ListReleasesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.CustomTargetType() + post.return_value = cloud_deploy.ListReleasesResponse() - 
client.get_custom_target_type( + client.list_releases( request, metadata=[ ("key", "val"), @@ -23020,8 +26776,8 @@ def test_get_custom_target_type_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_custom_target_type_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.GetCustomTargetTypeRequest +def test_list_releases_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.ListReleasesRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23030,7 +26786,7 @@ def test_get_custom_target_type_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" } request = request_type(**request_init) @@ -23043,10 +26799,10 @@ def test_get_custom_target_type_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_custom_target_type(request) + client.list_releases(request) -def test_get_custom_target_type_rest_flattened(): +def test_list_releases_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -23055,16 +26811,16 @@ def test_get_custom_target_type_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = cloud_deploy.CustomTargetType() + return_value = cloud_deploy.ListReleasesResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -23072,25 +26828,25 @@ def test_get_custom_target_type_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.CustomTargetType.pb(return_value) + return_value = cloud_deploy.ListReleasesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_custom_target_type(**mock_args) + client.list_releases(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/customTargetTypes/*}" + "%s/v1/{parent=projects/*/locations/*/deliveryPipelines/*}/releases" % client.transport._host, args[1], ) -def test_get_custom_target_type_rest_flattened_error(transport: str = "rest"): +def test_list_releases_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -23099,157 +26855,134 @@ def test_get_custom_target_type_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_custom_target_type( - cloud_deploy.GetCustomTargetTypeRequest(), - name="name_value", + client.list_releases( + cloud_deploy.ListReleasesRequest(), + parent="parent_value", ) -def test_get_custom_target_type_rest_error(): - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_deploy.CreateCustomTargetTypeRequest, - dict, - ], -) -def test_create_custom_target_type_rest(request_type): +def test_list_releases_rest_pager(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["custom_target_type"] = { - "name": "name_value", - "custom_target_type_id": "custom_target_type_id_value", - "uid": "uid_value", - "description": "description_value", - "annotations": {}, - "labels": {}, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "etag": "etag_value", - "custom_actions": { - "render_action": "render_action_value", - "deploy_action": "deploy_action_value", - "include_skaffold_modules": [ - { - "configs": ["configs_value1", "configs_value2"], - "git": { - "repo": "repo_value", - "path": "path_value", - "ref": "ref_value", - }, - "google_cloud_storage": { - "source": "source_value", - "path": "path_value", - }, - "google_cloud_build_repo": { - "repository": "repository_value", - "path": "path_value", - "ref": "ref_value", - }, - } - ], - }, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = cloud_deploy.CreateCustomTargetTypeRequest.meta.fields[ - "custom_target_type" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloud_deploy.ListReleasesResponse( + releases=[ + cloud_deploy.Release(), + cloud_deploy.Release(), + cloud_deploy.Release(), + ], + next_page_token="abc", + ), + cloud_deploy.ListReleasesResponse( + releases=[], + next_page_token="def", + ), + cloud_deploy.ListReleasesResponse( + releases=[ + cloud_deploy.Release(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListReleasesResponse( + releases=[ + cloud_deploy.Release(), + cloud_deploy.Release(), + ], + ), + ) + # Two responses for two calls + response = response + response - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + # Wrap the values into proper Response objs + response = tuple(cloud_deploy.ListReleasesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values - subfields_not_in_runtime = [] + sample_request = { + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["custom_target_type"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value + pager = client.list_releases(request=sample_request) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, 
subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.Release) for i in results) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["custom_target_type"][field])): - del request_init["custom_target_type"][field][i][subfield] - else: - del request_init["custom_target_type"][field][subfield] + pages = list(client.list_releases(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.GetReleaseRequest, + dict, + ], +) +def test_get_release_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.Release( + name="name_value", + uid="uid_value", + description="description_value", + abandoned=True, + skaffold_config_uri="skaffold_config_uri_value", + skaffold_config_path="skaffold_config_path_value", + render_state=cloud_deploy.Release.RenderState.SUCCEEDED, + etag="etag_value", + skaffold_version="skaffold_version_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.Release.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_custom_target_type(request) + response = client.get_release(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, cloud_deploy.Release) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.abandoned is True + assert response.skaffold_config_uri == "skaffold_config_uri_value" + assert response.skaffold_config_path == "skaffold_config_path_value" + assert response.render_state == cloud_deploy.Release.RenderState.SUCCEEDED + assert response.etag == "etag_value" + assert response.skaffold_version == "skaffold_version_value" -def test_create_custom_target_type_rest_use_cached_wrapped_rpc(): +def test_get_release_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -23263,45 +26996,33 @@ def test_create_custom_target_type_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - 
assert ( - client._transport.create_custom_target_type - in client._transport._wrapped_methods - ) + assert client._transport.get_release in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_custom_target_type - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_release] = mock_rpc request = {} - client.create_custom_target_type(request) + client.get_release(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_custom_target_type(request) + client.get_release(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_custom_target_type_rest_required_fields( - request_type=cloud_deploy.CreateCustomTargetTypeRequest, -): +def test_get_release_rest_required_fields(request_type=cloud_deploy.GetReleaseRequest): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["parent"] = "" - request_init["custom_target_type_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -23309,40 +27030,24 @@ def test_create_custom_target_type_rest_required_fields( ) # verify fields with default values are dropped - assert "customTargetTypeId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_custom_target_type._get_unset_required_fields(jsonified_request) + ).get_release._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # 
verify required fields with default values are now present - assert "customTargetTypeId" in jsonified_request - assert ( - jsonified_request["customTargetTypeId"] == request_init["custom_target_type_id"] - ) - jsonified_request["parent"] = "parent_value" - jsonified_request["customTargetTypeId"] = "custom_target_type_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_custom_target_type._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "custom_target_type_id", - "request_id", - "validate_only", - ) - ) + ).get_release._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "customTargetTypeId" in jsonified_request - assert jsonified_request["customTargetTypeId"] == "custom_target_type_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23351,7 +27056,7 @@ def test_create_custom_target_type_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.Release() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -23363,58 +27068,39 @@ def test_create_custom_target_type_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_deploy.Release.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_custom_target_type(request) + response = client.get_release(request) - expected_params = [ - ( - "customTargetTypeId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_custom_target_type_rest_unset_required_fields(): +def test_get_release_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_custom_target_type._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "customTargetTypeId", - "requestId", - "validateOnly", - ) - ) - & set( - ( - "parent", - "customTargetTypeId", - "customTargetType", - ) - ) - ) + unset_fields = transport.get_release._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_custom_target_type_rest_interceptors(null_interceptor): +def test_get_release_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23427,17 +27113,13 @@ def test_create_custom_target_type_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_create_custom_target_type" + transports.CloudDeployRestInterceptor, "post_get_release" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_create_custom_target_type" + transports.CloudDeployRestInterceptor, "pre_get_release" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.CreateCustomTargetTypeRequest.pb( - cloud_deploy.CreateCustomTargetTypeRequest() - ) + pb_message = cloud_deploy.GetReleaseRequest.pb(cloud_deploy.GetReleaseRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -23448,19 +27130,17 @@ def test_create_custom_target_type_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + req.return_value._content = cloud_deploy.Release.to_json(cloud_deploy.Release()) - request = cloud_deploy.CreateCustomTargetTypeRequest() + request = cloud_deploy.GetReleaseRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = cloud_deploy.Release() - client.create_custom_target_type( + client.get_release( request, metadata=[ ("key", "val"), @@ -23472,8 +27152,8 @@ def test_create_custom_target_type_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_custom_target_type_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.CreateCustomTargetTypeRequest +def 
test_get_release_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.GetReleaseRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23481,7 +27161,9 @@ def test_create_custom_target_type_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -23493,10 +27175,10 @@ def test_create_custom_target_type_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_custom_target_type(request) + client.get_release(request) -def test_create_custom_target_type_rest_flattened(): +def test_get_release_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -23505,40 +27187,42 @@ def test_create_custom_target_type_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.Release() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - custom_target_type=cloud_deploy.CustomTargetType(name="name_value"), - custom_target_type_id="custom_target_type_id_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.Release.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_custom_target_type(**mock_args) + client.get_release(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/customTargetTypes" + "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*/releases/*}" % client.transport._host, args[1], ) -def test_create_custom_target_type_rest_flattened_error(transport: str = "rest"): +def test_get_release_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -23547,15 +27231,13 @@ def test_create_custom_target_type_rest_flattened_error(transport: str = "rest") # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_custom_target_type( - cloud_deploy.CreateCustomTargetTypeRequest(), - parent="parent_value", - custom_target_type=cloud_deploy.CustomTargetType(name="name_value"), - custom_target_type_id="custom_target_type_id_value", + client.get_release( + cloud_deploy.GetReleaseRequest(), + name="name_value", ) -def test_create_custom_target_type_rest_error(): +def test_get_release_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -23564,11 +27246,11 @@ def test_create_custom_target_type_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.UpdateCustomTargetTypeRequest, + cloud_deploy.CreateReleaseRequest, dict, ], ) -def test_update_custom_target_type_rest(request_type): +def test_create_release_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -23576,52 +27258,229 @@ def test_update_custom_target_type_rest(request_type): # send a request that will satisfy transcoding request_init = { - "custom_target_type": { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" - } + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" } - request_init["custom_target_type"] = { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3", - "custom_target_type_id": "custom_target_type_id_value", + request_init["release"] = { + "name": "name_value", "uid": "uid_value", "description": "description_value", "annotations": {}, "labels": {}, + "abandoned": True, "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, + "render_start_time": {}, + "render_end_time": {}, + "skaffold_config_uri": "skaffold_config_uri_value", + "skaffold_config_path": "skaffold_config_path_value", + "build_artifacts": [{"image": "image_value", "tag": "tag_value"}], + "delivery_pipeline_snapshot": { + "name": "name_value", + "uid": "uid_value", + 
"description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {}, + "update_time": {}, + "serial_pipeline": { + "stages": [ + { + "target_id": "target_id_value", + "profiles": ["profiles_value1", "profiles_value2"], + "strategy": { + "standard": { + "verify": True, + "predeploy": { + "actions": ["actions_value1", "actions_value2"] + }, + "postdeploy": { + "actions": ["actions_value1", "actions_value2"] + }, + }, + "canary": { + "runtime_config": { + "kubernetes": { + "gateway_service_mesh": { + "http_route": "http_route_value", + "service": "service_value", + "deployment": "deployment_value", + "route_update_wait_time": { + "seconds": 751, + "nanos": 543, + }, + "stable_cutback_duration": {}, + }, + "service_networking": { + "service": "service_value", + "deployment": "deployment_value", + "disable_pod_overprovisioning": True, + }, + }, + "cloud_run": { + "automatic_traffic_control": True, + "canary_revision_tags": [ + "canary_revision_tags_value1", + "canary_revision_tags_value2", + ], + "prior_revision_tags": [ + "prior_revision_tags_value1", + "prior_revision_tags_value2", + ], + "stable_revision_tags": [ + "stable_revision_tags_value1", + "stable_revision_tags_value2", + ], + }, + }, + "canary_deployment": { + "percentages": [1170, 1171], + "verify": True, + "predeploy": {}, + "postdeploy": {}, + }, + "custom_canary_deployment": { + "phase_configs": [ + { + "phase_id": "phase_id_value", + "percentage": 1054, + "profiles": [ + "profiles_value1", + "profiles_value2", + ], + "verify": True, + "predeploy": {}, + "postdeploy": {}, + } + ] + }, + }, + }, + "deploy_parameters": [ + {"values": {}, "match_target_labels": {}} + ], + } + ] + }, + "condition": { + "pipeline_ready_condition": {"status": True, "update_time": {}}, + "targets_present_condition": { + "status": True, + "missing_targets": [ + "missing_targets_value1", + "missing_targets_value2", + ], + "update_time": {}, + }, + "targets_type_condition": { + "status": True, + 
"error_details": "error_details_value", + }, + }, + "etag": "etag_value", + "suspended": True, + }, + "target_snapshots": [ + { + "name": "name_value", + "target_id": "target_id_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "require_approval": True, + "create_time": {}, + "update_time": {}, + "gke": { + "cluster": "cluster_value", + "internal_ip": True, + "proxy_url": "proxy_url_value", + }, + "anthos_cluster": {"membership": "membership_value"}, + "run": {"location": "location_value"}, + "multi_target": { + "target_ids": ["target_ids_value1", "target_ids_value2"] + }, + "custom_target": {"custom_target_type": "custom_target_type_value"}, + "etag": "etag_value", + "execution_configs": [ + { + "usages": [1], + "default_pool": { + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "private_pool": { + "worker_pool": "worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "worker_pool": "worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + "execution_timeout": {}, + "verbose": True, + } + ], + "deploy_parameters": {}, + } + ], + "custom_target_type_snapshots": [ + { + "name": "name_value", + "custom_target_type_id": "custom_target_type_id_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {}, + "update_time": {}, + "etag": "etag_value", + "custom_actions": { + "render_action": "render_action_value", + "deploy_action": "deploy_action_value", + "include_skaffold_modules": [ + { + "configs": ["configs_value1", "configs_value2"], + "git": { + "repo": "repo_value", + "path": "path_value", + "ref": "ref_value", + }, + "google_cloud_storage": { + "source": "source_value", + "path": "path_value", + }, + "google_cloud_build_repo": { + "repository": "repository_value", + 
"path": "path_value", + "ref": "ref_value", + }, + } + ], + }, + } + ], + "render_state": 1, "etag": "etag_value", - "custom_actions": { - "render_action": "render_action_value", - "deploy_action": "deploy_action_value", - "include_skaffold_modules": [ - { - "configs": ["configs_value1", "configs_value2"], - "git": { - "repo": "repo_value", - "path": "path_value", - "ref": "ref_value", - }, - "google_cloud_storage": { - "source": "source_value", - "path": "path_value", - }, - "google_cloud_build_repo": { - "repository": "repository_value", - "path": "path_value", - "ref": "ref_value", - }, - } - ], + "skaffold_version": "skaffold_version_value", + "target_artifacts": {}, + "target_renders": {}, + "condition": { + "release_ready_condition": {"status": True}, + "skaffold_supported_condition": { + "status": True, + "skaffold_support_state": 1, + "maintenance_mode_time": {}, + "support_expiration_time": {}, + }, }, + "deploy_parameters": {}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = cloud_deploy.UpdateCustomTargetTypeRequest.meta.fields[ - "custom_target_type" - ] + test_field = cloud_deploy.CreateReleaseRequest.meta.fields["release"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -23649,7 +27508,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["custom_target_type"].items(): # pragma: NO COVER + for field, value in request_init["release"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -23679,10 +27538,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["custom_target_type"][field])): - del request_init["custom_target_type"][field][i][subfield] + for i in range(0, len(request_init["release"][field])): + del request_init["release"][field][i][subfield] else: - del request_init["custom_target_type"][field][subfield] + del request_init["release"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -23697,13 +27556,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_custom_target_type(request) + response = client.create_release(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_update_custom_target_type_rest_use_cached_wrapped_rpc(): +def test_create_release_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -23717,22 +27576,17 @@ def test_update_custom_target_type_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_custom_target_type - in client._transport._wrapped_methods - ) + assert client._transport.create_release in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_custom_target_type - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_release] = mock_rpc request = {} - client.update_custom_target_type(request) + client.create_release(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -23741,19 +27595,21 @@ def test_update_custom_target_type_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_custom_target_type(request) + client.create_release(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_custom_target_type_rest_required_fields( - request_type=cloud_deploy.UpdateCustomTargetTypeRequest, +def test_create_release_rest_required_fields( + request_type=cloud_deploy.CreateReleaseRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} + request_init["parent"] = "" + request_init["release_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -23761,29 +27617,39 @@ def test_update_custom_target_type_rest_required_fields( ) # verify fields with default values are dropped + assert "releaseId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_custom_target_type._get_unset_required_fields(jsonified_request) + ).create_release._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "releaseId" in jsonified_request + assert jsonified_request["releaseId"] == request_init["release_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["releaseId"] = "release_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_custom_target_type._get_unset_required_fields(jsonified_request) + ).create_release._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "allow_missing", + "override_deploy_policy", + "release_id", "request_id", - "update_mask", "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "releaseId" in jsonified_request + assert jsonified_request["releaseId"] == "release_id_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23804,7 +27670,7 @@ def test_update_custom_target_type_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -23817,39 +27683,46 @@ def test_update_custom_target_type_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_custom_target_type(request) + response = client.create_release(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "releaseId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_custom_target_type_rest_unset_required_fields(): +def test_create_release_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_custom_target_type._get_unset_required_fields({}) + unset_fields = transport.create_release._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "allowMissing", + "overrideDeployPolicy", + "releaseId", "requestId", - "updateMask", "validateOnly", ) ) & set( ( - "updateMask", - "customTargetType", + "parent", + "releaseId", + "release", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) 
-def test_update_custom_target_type_rest_interceptors(null_interceptor): +def test_create_release_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23864,14 +27737,14 @@ def test_update_custom_target_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_update_custom_target_type" + transports.CloudDeployRestInterceptor, "post_create_release" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_update_custom_target_type" + transports.CloudDeployRestInterceptor, "pre_create_release" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.UpdateCustomTargetTypeRequest.pb( - cloud_deploy.UpdateCustomTargetTypeRequest() + pb_message = cloud_deploy.CreateReleaseRequest.pb( + cloud_deploy.CreateReleaseRequest() ) transcode.return_value = { "method": "post", @@ -23887,7 +27760,7 @@ def test_update_custom_target_type_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = cloud_deploy.UpdateCustomTargetTypeRequest() + request = cloud_deploy.CreateReleaseRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -23895,7 +27768,7 @@ def test_update_custom_target_type_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.update_custom_target_type( + client.create_release( request, metadata=[ ("key", "val"), @@ -23907,8 +27780,8 @@ def test_update_custom_target_type_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_custom_target_type_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.UpdateCustomTargetTypeRequest +def test_create_release_rest_bad_request( + transport: str = "rest", 
request_type=cloud_deploy.CreateReleaseRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23917,9 +27790,7 @@ def test_update_custom_target_type_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "custom_target_type": { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" - } + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" } request = request_type(**request_init) @@ -23932,10 +27803,10 @@ def test_update_custom_target_type_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_custom_target_type(request) + client.create_release(request) -def test_update_custom_target_type_rest_flattened(): +def test_create_release_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -23948,15 +27819,14 @@ def test_update_custom_target_type_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "custom_target_type": { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" - } + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" } # get truthy value for each flattened field mock_args = dict( - custom_target_type=cloud_deploy.CustomTargetType(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", + release=cloud_deploy.Release(name="name_value"), + release_id="release_id_value", ) mock_args.update(sample_request) @@ -23967,20 +27837,20 @@ def test_update_custom_target_type_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_custom_target_type(**mock_args) + client.create_release(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{custom_target_type.name=projects/*/locations/*/customTargetTypes/*}" + "%s/v1/{parent=projects/*/locations/*/deliveryPipelines/*}/releases" % client.transport._host, args[1], ) -def test_update_custom_target_type_rest_flattened_error(transport: str = "rest"): +def test_create_release_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -23989,14 +27859,15 @@ def test_update_custom_target_type_rest_flattened_error(transport: str = "rest") # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_custom_target_type( - cloud_deploy.UpdateCustomTargetTypeRequest(), - custom_target_type=cloud_deploy.CustomTargetType(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.create_release( + cloud_deploy.CreateReleaseRequest(), + parent="parent_value", + release=cloud_deploy.Release(name="name_value"), + release_id="release_id_value", ) -def test_update_custom_target_type_rest_error(): +def test_create_release_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -24005,11 +27876,11 @@ def test_update_custom_target_type_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.DeleteCustomTargetTypeRequest, + cloud_deploy.AbandonReleaseRequest, dict, ], ) -def test_delete_custom_target_type_rest(request_type): +def test_abandon_release_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24017,29 +27888,31 @@ def test_delete_custom_target_type_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + "name": 
"projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.AbandonReleaseResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.AbandonReleaseResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_custom_target_type(request) + response = client.abandon_release(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, cloud_deploy.AbandonReleaseResponse) -def test_delete_custom_target_type_rest_use_cached_wrapped_rpc(): +def test_abandon_release_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -24053,39 +27926,30 @@ def test_delete_custom_target_type_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_custom_target_type - in client._transport._wrapped_methods - ) + assert client._transport.abandon_release in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.delete_custom_target_type - ] = mock_rpc + client._transport._wrapped_methods[client._transport.abandon_release] = mock_rpc request = {} - client.delete_custom_target_type(request) + client.abandon_release(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_custom_target_type(request) + client.abandon_release(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_custom_target_type_rest_required_fields( - request_type=cloud_deploy.DeleteCustomTargetTypeRequest, +def test_abandon_release_rest_required_fields( + request_type=cloud_deploy.AbandonReleaseRequest, ): transport_class = transports.CloudDeployRestTransport @@ -24101,7 +27965,7 @@ def test_delete_custom_target_type_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_custom_target_type._get_unset_required_fields(jsonified_request) + ).abandon_release._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -24110,16 +27974,7 @@ def test_delete_custom_target_type_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_custom_target_type._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "allow_missing", - "etag", - "request_id", - "validate_only", - ) - ) + ).abandon_release._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -24133,7 +27988,7 @@ def test_delete_custom_target_type_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.AbandonReleaseResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -24145,46 +28000,40 @@ def test_delete_custom_target_type_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_deploy.AbandonReleaseResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_custom_target_type(request) + response = client.abandon_release(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_custom_target_type_rest_unset_required_fields(): +def test_abandon_release_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_custom_target_type._get_unset_required_fields({}) - assert set(unset_fields) == ( - 
set( - ( - "allowMissing", - "etag", - "requestId", - "validateOnly", - ) - ) - & set(("name",)) - ) + unset_fields = transport.abandon_release._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_custom_target_type_rest_interceptors(null_interceptor): +def test_abandon_release_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24197,16 +28046,14 @@ def test_delete_custom_target_type_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_delete_custom_target_type" + transports.CloudDeployRestInterceptor, "post_abandon_release" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_delete_custom_target_type" + transports.CloudDeployRestInterceptor, "pre_abandon_release" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.DeleteCustomTargetTypeRequest.pb( - cloud_deploy.DeleteCustomTargetTypeRequest() + pb_message = cloud_deploy.AbandonReleaseRequest.pb( + cloud_deploy.AbandonReleaseRequest() ) transcode.return_value = { "method": "post", @@ -24218,19 +28065,19 @@ def test_delete_custom_target_type_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = cloud_deploy.AbandonReleaseResponse.to_json( + cloud_deploy.AbandonReleaseResponse() ) - request = cloud_deploy.DeleteCustomTargetTypeRequest() + request = cloud_deploy.AbandonReleaseRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] 
pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = cloud_deploy.AbandonReleaseResponse() - client.delete_custom_target_type( + client.abandon_release( request, metadata=[ ("key", "val"), @@ -24242,8 +28089,8 @@ def test_delete_custom_target_type_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_custom_target_type_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.DeleteCustomTargetTypeRequest +def test_abandon_release_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.AbandonReleaseRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24252,7 +28099,7 @@ def test_delete_custom_target_type_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" } request = request_type(**request_init) @@ -24265,10 +28112,10 @@ def test_delete_custom_target_type_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_custom_target_type(request) + client.abandon_release(request) -def test_delete_custom_target_type_rest_flattened(): +def test_abandon_release_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24277,11 +28124,11 @@ def test_delete_custom_target_type_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.AbandonReleaseResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/customTargetTypes/sample3" + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" } # get truthy value for each flattened field @@ -24293,24 +28140,26 @@ def test_delete_custom_target_type_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.AbandonReleaseResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_custom_target_type(**mock_args) + client.abandon_release(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/customTargetTypes/*}" + "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*/releases/*}:abandon" % client.transport._host, args[1], ) -def test_delete_custom_target_type_rest_flattened_error(transport: str = "rest"): +def test_abandon_release_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -24319,13 +28168,13 @@ def test_delete_custom_target_type_rest_flattened_error(transport: str = "rest") # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_custom_target_type( - cloud_deploy.DeleteCustomTargetTypeRequest(), + client.abandon_release( + cloud_deploy.AbandonReleaseRequest(), name="name_value", ) -def test_delete_custom_target_type_rest_error(): +def test_abandon_release_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -24334,48 +28183,149 @@ def test_delete_custom_target_type_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.ListReleasesRequest, + cloud_deploy.CreateDeployPolicyRequest, dict, ], ) -def test_list_releases_rest(request_type): +def test_create_deploy_policy_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["deploy_policy"] = { + "name": "name_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "suspended": True, + "selectors": [ + { + "delivery_pipeline": {"id": "id_value", "labels": {}}, + "target": {"id": "id_value", "labels": {}}, + } + ], + "rules": [ + { + "restrict_rollouts": { + "id": "id_value", + "invokers": [1], + "actions": [1], + "time_window": { + "time_zone": "time_zone_value", + "ranges": [ + { + "start_date": {"year": 433, "month": 550, "day": 318}, + "end_date": {}, + "start_time_of_day": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "end_time_of_day": {}, + "day_of_week": [1], + } + ], + }, + } + } + ], + "etag": "etag_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_deploy.CreateDeployPolicyRequest.meta.fields["deploy_policy"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["deploy_policy"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the 
dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["deploy_policy"][field])): + del request_init["deploy_policy"][field][i][subfield] + else: + del request_init["deploy_policy"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListReleasesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.ListReleasesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_releases(request) + response = client.create_deploy_policy(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListReleasesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert response.operation.name == "operations/spam" -def test_list_releases_rest_use_cached_wrapped_rpc(): +def test_create_deploy_policy_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -24389,35 +28339,44 @@ def test_list_releases_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_releases in client._transport._wrapped_methods + assert ( + client._transport.create_deploy_policy in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_releases] = mock_rpc + client._transport._wrapped_methods[ + client._transport.create_deploy_policy + ] = mock_rpc request = {} - client.list_releases(request) + client.create_deploy_policy(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_releases(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_releases_rest_required_fields( - request_type=cloud_deploy.ListReleasesRequest, +def test_create_deploy_policy_rest_required_fields( + request_type=cloud_deploy.CreateDeployPolicyRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} request_init["parent"] = "" + request_init["deploy_policy_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -24425,26 +28384,29 @@ def test_list_releases_rest_required_fields( ) # verify fields with default values are dropped + assert "deployPolicyId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_releases._get_unset_required_fields(jsonified_request) + ).create_deploy_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "deployPolicyId" in jsonified_request + assert jsonified_request["deployPolicyId"] == request_init["deploy_policy_id"] jsonified_request["parent"] = "parent_value" + jsonified_request["deployPolicyId"] = "deploy_policy_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_releases._get_unset_required_fields(jsonified_request) + ).create_deploy_policy._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "filter", - "order_by", - "page_size", - "page_token", + "deploy_policy_id", + "request_id", + "validate_only", ) ) jsonified_request.update(unset_fields) @@ -24452,6 +28414,8 @@ def test_list_releases_rest_required_fields( # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "deployPolicyId" in jsonified_request + assert jsonified_request["deployPolicyId"] == "deploy_policy_id_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24460,7 +28424,7 @@ def test_list_releases_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListReleasesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -24472,49 +28436,58 @@ def test_list_releases_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = cloud_deploy.ListReleasesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_releases(request) + response = client.create_deploy_policy(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "deployPolicyId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = 
req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_releases_rest_unset_required_fields(): +def test_create_deploy_policy_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_releases._get_unset_required_fields({}) + unset_fields = transport.create_deploy_policy._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "deployPolicyId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "deployPolicyId", + "deployPolicy", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_releases_rest_interceptors(null_interceptor): +def test_create_deploy_policy_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24527,14 +28500,16 @@ def test_list_releases_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_list_releases" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_create_deploy_policy" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_list_releases" + transports.CloudDeployRestInterceptor, "pre_create_deploy_policy" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.ListReleasesRequest.pb( - cloud_deploy.ListReleasesRequest() + pb_message = cloud_deploy.CreateDeployPolicyRequest.pb( + cloud_deploy.CreateDeployPolicyRequest() ) transcode.return_value = { "method": "post", @@ -24546,19 +28521,19 @@ def test_list_releases_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request 
= PreparedRequest() - req.return_value._content = cloud_deploy.ListReleasesResponse.to_json( - cloud_deploy.ListReleasesResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = cloud_deploy.ListReleasesRequest() + request = cloud_deploy.CreateDeployPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.ListReleasesResponse() + post.return_value = operations_pb2.Operation() - client.list_releases( + client.create_deploy_policy( request, metadata=[ ("key", "val"), @@ -24570,8 +28545,8 @@ def test_list_releases_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_releases_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.ListReleasesRequest +def test_create_deploy_policy_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.CreateDeployPolicyRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24579,9 +28554,7 @@ def test_list_releases_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -24593,10 +28566,10 @@ def test_list_releases_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_releases(request) + client.create_deploy_policy(request) -def test_list_releases_rest_flattened(): +def test_create_deploy_policy_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24605,42 +28578,40 @@ def test_list_releases_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.ListReleasesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( parent="parent_value", + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + deploy_policy_id="deploy_policy_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.ListReleasesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_releases(**mock_args) + client.create_deploy_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/deliveryPipelines/*}/releases" + "%s/v1/{parent=projects/*/locations/*}/deployPolicies" % client.transport._host, args[1], ) -def test_list_releases_rest_flattened_error(transport: str = "rest"): +def test_create_deploy_policy_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -24649,134 +28620,170 @@ def test_list_releases_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_releases( - cloud_deploy.ListReleasesRequest(), + client.create_deploy_policy( + cloud_deploy.CreateDeployPolicyRequest(), parent="parent_value", + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + deploy_policy_id="deploy_policy_id_value", ) -def test_list_releases_rest_pager(transport: str = "rest"): +def test_create_deploy_policy_rest_error(): client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - cloud_deploy.ListReleasesResponse( - releases=[ - cloud_deploy.Release(), - cloud_deploy.Release(), - cloud_deploy.Release(), - ], - next_page_token="abc", - ), - cloud_deploy.ListReleasesResponse( - releases=[], - next_page_token="def", - ), - cloud_deploy.ListReleasesResponse( - releases=[ - cloud_deploy.Release(), - ], - next_page_token="ghi", - ), - cloud_deploy.ListReleasesResponse( - releases=[ - cloud_deploy.Release(), - cloud_deploy.Release(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(cloud_deploy.ListReleasesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } - - pager = client.list_releases(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloud_deploy.Release) for i in results) - - pages = list(client.list_releases(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - cloud_deploy.GetReleaseRequest, + cloud_deploy.UpdateDeployPolicyRequest, dict, ], ) -def test_get_release_rest(request_type): +def test_update_deploy_policy_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" - } + # send a request that 
will satisfy transcoding + request_init = { + "deploy_policy": { + "name": "projects/sample1/locations/sample2/deployPolicies/sample3" + } + } + request_init["deploy_policy"] = { + "name": "projects/sample1/locations/sample2/deployPolicies/sample3", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "suspended": True, + "selectors": [ + { + "delivery_pipeline": {"id": "id_value", "labels": {}}, + "target": {"id": "id_value", "labels": {}}, + } + ], + "rules": [ + { + "restrict_rollouts": { + "id": "id_value", + "invokers": [1], + "actions": [1], + "time_window": { + "time_zone": "time_zone_value", + "ranges": [ + { + "start_date": {"year": 433, "month": 550, "day": 318}, + "end_date": {}, + "start_time_of_day": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "end_time_of_day": {}, + "day_of_week": [1], + } + ], + }, + } + } + ], + "etag": "etag_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_deploy.UpdateDeployPolicyRequest.meta.fields["deploy_policy"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["deploy_policy"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["deploy_policy"][field])): + del request_init["deploy_policy"][field][i][subfield] + else: + del 
request_init["deploy_policy"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.Release( - name="name_value", - uid="uid_value", - description="description_value", - abandoned=True, - skaffold_config_uri="skaffold_config_uri_value", - skaffold_config_path="skaffold_config_path_value", - render_state=cloud_deploy.Release.RenderState.SUCCEEDED, - etag="etag_value", - skaffold_version="skaffold_version_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.Release.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_release(request) + response = client.update_deploy_policy(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.Release) - assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.description == "description_value" - assert response.abandoned is True - assert response.skaffold_config_uri == "skaffold_config_uri_value" - assert response.skaffold_config_path == "skaffold_config_path_value" - assert response.render_state == cloud_deploy.Release.RenderState.SUCCEEDED - assert response.etag == "etag_value" - assert response.skaffold_version == "skaffold_version_value" + assert response.operation.name == "operations/spam" -def test_get_release_rest_use_cached_wrapped_rpc(): +def test_update_deploy_policy_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -24790,33 +28797,42 @@ def test_get_release_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_release in client._transport._wrapped_methods + assert ( + client._transport.update_deploy_policy in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_release] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_deploy_policy + ] = mock_rpc request = {} - client.get_release(request) + client.update_deploy_policy(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_release(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_release_rest_required_fields(request_type=cloud_deploy.GetReleaseRequest): +def test_update_deploy_policy_rest_required_fields( + request_type=cloud_deploy.UpdateDeployPolicyRequest, +): transport_class = transports.CloudDeployRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -24827,21 +28843,26 @@ def test_get_release_rest_required_fields(request_type=cloud_deploy.GetReleaseRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_release._get_unset_required_fields(jsonified_request) + ).update_deploy_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_release._get_unset_required_fields(jsonified_request) + ).update_deploy_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "allow_missing", + "request_id", + "update_mask", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24850,7 +28871,7 @@ def test_get_release_rest_required_fields(request_type=cloud_deploy.GetReleaseRe request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.Release() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -24862,39 +28883,52 @@ def test_get_release_rest_required_fields(request_type=cloud_deploy.GetReleaseRe pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = cloud_deploy.Release.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_release(request) + response = client.update_deploy_policy(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_release_rest_unset_required_fields(): +def test_update_deploy_policy_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - 
unset_fields = transport.get_release._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_deploy_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "requestId", + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "updateMask", + "deployPolicy", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_release_rest_interceptors(null_interceptor): +def test_update_deploy_policy_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24907,13 +28941,17 @@ def test_get_release_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, "post_get_release" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_update_deploy_policy" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_get_release" + transports.CloudDeployRestInterceptor, "pre_update_deploy_policy" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.GetReleaseRequest.pb(cloud_deploy.GetReleaseRequest()) + pb_message = cloud_deploy.UpdateDeployPolicyRequest.pb( + cloud_deploy.UpdateDeployPolicyRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -24924,17 +28962,19 @@ def test_get_release_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_deploy.Release.to_json(cloud_deploy.Release()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = cloud_deploy.GetReleaseRequest() + request = cloud_deploy.UpdateDeployPolicyRequest() 
metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.Release() + post.return_value = operations_pb2.Operation() - client.get_release( + client.update_deploy_policy( request, metadata=[ ("key", "val"), @@ -24946,8 +28986,8 @@ def test_get_release_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_release_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.GetReleaseRequest +def test_update_deploy_policy_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.UpdateDeployPolicyRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24956,7 +28996,9 @@ def test_get_release_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + "deploy_policy": { + "name": "projects/sample1/locations/sample2/deployPolicies/sample3" + } } request = request_type(**request_init) @@ -24969,10 +29011,10 @@ def test_get_release_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_release(request) + client.update_deploy_policy(request) -def test_get_release_rest_flattened(): +def test_update_deploy_policy_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24981,42 +29023,43 @@ def test_get_release_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = cloud_deploy.Release() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + "deploy_policy": { + "name": "projects/sample1/locations/sample2/deployPolicies/sample3" + } } # get truthy value for each flattened field mock_args = dict( - name="name_value", + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_deploy.Release.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_release(**mock_args) + client.update_deploy_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*/releases/*}" + "%s/v1/{deploy_policy.name=projects/*/locations/*/deployPolicies/*}" % client.transport._host, args[1], ) -def test_get_release_rest_flattened_error(transport: str = "rest"): +def test_update_deploy_policy_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -25025,13 +29068,14 @@ def test_get_release_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_release( - cloud_deploy.GetReleaseRequest(), - name="name_value", + client.update_deploy_policy( + cloud_deploy.UpdateDeployPolicyRequest(), + deploy_policy=cloud_deploy.DeployPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_get_release_rest_error(): +def test_update_deploy_policy_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -25040,319 +29084,370 @@ def test_get_release_rest_error(): @pytest.mark.parametrize( "request_type", [ - cloud_deploy.CreateReleaseRequest, + cloud_deploy.DeleteDeployPolicyRequest, dict, ], ) -def test_create_release_rest(request_type): +def test_delete_deploy_policy_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } - request_init["release"] = { - "name": "name_value", - "uid": "uid_value", - "description": "description_value", - "annotations": {}, - "labels": {}, - "abandoned": True, - "create_time": {"seconds": 751, "nanos": 543}, - "render_start_time": {}, - "render_end_time": {}, - "skaffold_config_uri": "skaffold_config_uri_value", - "skaffold_config_path": "skaffold_config_path_value", - "build_artifacts": [{"image": "image_value", "tag": "tag_value"}], - "delivery_pipeline_snapshot": { - "name": "name_value", - "uid": "uid_value", - "description": "description_value", - "annotations": {}, - "labels": {}, - "create_time": {}, - "update_time": {}, - "serial_pipeline": { - "stages": [ - { - "target_id": "target_id_value", - "profiles": ["profiles_value1", "profiles_value2"], - "strategy": { - "standard": { - "verify": True, - "predeploy": { - "actions": ["actions_value1", "actions_value2"] - }, - "postdeploy": { - "actions": ["actions_value1", "actions_value2"] - 
}, - }, - "canary": { - "runtime_config": { - "kubernetes": { - "gateway_service_mesh": { - "http_route": "http_route_value", - "service": "service_value", - "deployment": "deployment_value", - "route_update_wait_time": { - "seconds": 751, - "nanos": 543, - }, - "stable_cutback_duration": {}, - }, - "service_networking": { - "service": "service_value", - "deployment": "deployment_value", - "disable_pod_overprovisioning": True, - }, - }, - "cloud_run": { - "automatic_traffic_control": True, - "canary_revision_tags": [ - "canary_revision_tags_value1", - "canary_revision_tags_value2", - ], - "prior_revision_tags": [ - "prior_revision_tags_value1", - "prior_revision_tags_value2", - ], - "stable_revision_tags": [ - "stable_revision_tags_value1", - "stable_revision_tags_value2", - ], - }, - }, - "canary_deployment": { - "percentages": [1170, 1171], - "verify": True, - "predeploy": {}, - "postdeploy": {}, - }, - "custom_canary_deployment": { - "phase_configs": [ - { - "phase_id": "phase_id_value", - "percentage": 1054, - "profiles": [ - "profiles_value1", - "profiles_value2", - ], - "verify": True, - "predeploy": {}, - "postdeploy": {}, - } - ] - }, - }, - }, - "deploy_parameters": [ - {"values": {}, "match_target_labels": {}} - ], - } - ] - }, - "condition": { - "pipeline_ready_condition": {"status": True, "update_time": {}}, - "targets_present_condition": { - "status": True, - "missing_targets": [ - "missing_targets_value1", - "missing_targets_value2", - ], - "update_time": {}, - }, - "targets_type_condition": { - "status": True, - "error_details": "error_details_value", - }, - }, - "etag": "etag_value", - "suspended": True, - }, - "target_snapshots": [ - { - "name": "name_value", - "target_id": "target_id_value", - "uid": "uid_value", - "description": "description_value", - "annotations": {}, - "labels": {}, - "require_approval": True, - "create_time": {}, - "update_time": {}, - "gke": {"cluster": "cluster_value", "internal_ip": True}, - "anthos_cluster": 
{"membership": "membership_value"}, - "run": {"location": "location_value"}, - "multi_target": { - "target_ids": ["target_ids_value1", "target_ids_value2"] - }, - "custom_target": {"custom_target_type": "custom_target_type_value"}, - "etag": "etag_value", - "execution_configs": [ - { - "usages": [1], - "default_pool": { - "service_account": "service_account_value", - "artifact_storage": "artifact_storage_value", - }, - "private_pool": { - "worker_pool": "worker_pool_value", - "service_account": "service_account_value", - "artifact_storage": "artifact_storage_value", - }, - "worker_pool": "worker_pool_value", - "service_account": "service_account_value", - "artifact_storage": "artifact_storage_value", - "execution_timeout": {}, - "verbose": True, - } - ], - "deploy_parameters": {}, - } - ], - "custom_target_type_snapshots": [ - { - "name": "name_value", - "custom_target_type_id": "custom_target_type_id_value", - "uid": "uid_value", - "description": "description_value", - "annotations": {}, - "labels": {}, - "create_time": {}, - "update_time": {}, - "etag": "etag_value", - "custom_actions": { - "render_action": "render_action_value", - "deploy_action": "deploy_action_value", - "include_skaffold_modules": [ - { - "configs": ["configs_value1", "configs_value2"], - "git": { - "repo": "repo_value", - "path": "path_value", - "ref": "ref_value", - }, - "google_cloud_storage": { - "source": "source_value", - "path": "path_value", - }, - "google_cloud_build_repo": { - "repository": "repository_value", - "path": "path_value", - "ref": "ref_value", - }, - } - ], - }, + request_init = {"name": "projects/sample1/locations/sample2/deployPolicies/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_deploy_policy(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_deploy_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_deploy_policy in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_deploy_policy + ] = mock_rpc + + request = {} + client.delete_deploy_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_deploy_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_deploy_policy_rest_required_fields( + request_type=cloud_deploy.DeleteDeployPolicyRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_deploy_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_deploy_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "etag", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, } - ], - "render_state": 1, - "etag": "etag_value", - "skaffold_version": "skaffold_version_value", - "target_artifacts": {}, - "target_renders": {}, - "condition": { - "release_ready_condition": {"status": True}, - "skaffold_supported_condition": { - "status": True, - "skaffold_support_state": 1, - "maintenance_mode_time": {}, - "support_expiration_time": {}, - }, - }, - "deploy_parameters": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + transcode.return_value = transcode_result - # Determine if the message type is proto-plus or protobuf - test_field = cloud_deploy.CreateReleaseRequest.meta.fields["release"] + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_deploy_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_deploy_policy_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_deploy_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "etag", + "requestId", + "validateOnly", + ) + ) + & set(("name",)) + ) - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_deploy_policy_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_delete_deploy_policy" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_delete_deploy_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.DeleteDeployPolicyRequest.pb( + cloud_deploy.DeleteDeployPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + request = cloud_deploy.DeleteDeployPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + 
pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() - subfields_not_in_runtime = [] + client.delete_deploy_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["release"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value + pre.assert_called_once() + post.assert_called_once() - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["release"][field])): - del request_init["release"][field][i][subfield] - else: - del request_init["release"][field][subfield] +def test_delete_deploy_policy_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.DeleteDeployPolicyRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = 
{"name": "projects/sample1/locations/sample2/deployPolicies/sample3"} request = request_type(**request_init) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_deploy_policy(request) + + +def test_delete_deploy_policy_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = operations_pb2.Operation(name="operations/spam") + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deployPolicies/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_deploy_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deployPolicies/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_deploy_policy_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_deploy_policy( + cloud_deploy.DeleteDeployPolicyRequest(), + name="name_value", + ) + + +def test_delete_deploy_policy_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.ListDeployPoliciesRequest, + dict, + ], +) +def test_list_deploy_policies_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloud_deploy.ListDeployPoliciesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.ListDeployPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_release(request) + response = client.list_deploy_policies(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListDeployPoliciesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_create_release_rest_use_cached_wrapped_rpc(): +def test_list_deploy_policies_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -25366,40 +29461,39 @@ def test_create_release_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_release in client._transport._wrapped_methods + assert ( + client._transport.list_deploy_policies in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.create_release] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_deploy_policies + ] = mock_rpc request = {} - client.create_release(request) + client.list_deploy_policies(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_release(request) + client.list_deploy_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_release_rest_required_fields( - request_type=cloud_deploy.CreateReleaseRequest, +def test_list_deploy_policies_rest_required_fields( + request_type=cloud_deploy.ListDeployPoliciesRequest, ): transport_class = transports.CloudDeployRestTransport request_init = {} request_init["parent"] = "" - request_init["release_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -25407,29 +29501,26 @@ def test_create_release_rest_required_fields( ) # verify fields with default values are dropped - assert "releaseId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_release._get_unset_required_fields(jsonified_request) + ).list_deploy_policies._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "releaseId" in jsonified_request - assert jsonified_request["releaseId"] == request_init["release_id"] jsonified_request["parent"] = "parent_value" - jsonified_request["releaseId"] = "release_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_release._get_unset_required_fields(jsonified_request) + 
).list_deploy_policies._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "release_id", - "request_id", - "validate_only", + "filter", + "order_by", + "page_size", + "page_token", ) ) jsonified_request.update(unset_fields) @@ -25437,8 +29528,6 @@ def test_create_release_rest_required_fields( # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "releaseId" in jsonified_request - assert jsonified_request["releaseId"] == "release_id_value" client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25447,7 +29536,7 @@ def test_create_release_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.ListDeployPoliciesResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -25459,58 +29548,49 @@ def test_create_release_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_deploy.ListDeployPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_release(request) + response = client.list_deploy_policies(request) - expected_params = [ - ( - "releaseId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_release_rest_unset_required_fields(): +def test_list_deploy_policies_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_release._get_unset_required_fields({}) + unset_fields = transport.list_deploy_policies._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "releaseId", - "requestId", - "validateOnly", - ) - ) - & set( - ( - "parent", - "releaseId", - "release", + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_release_rest_interceptors(null_interceptor): +def test_list_deploy_policies_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ 
-25523,16 +29603,14 @@ def test_create_release_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudDeployRestInterceptor, "post_create_release" + transports.CloudDeployRestInterceptor, "post_list_deploy_policies" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_create_release" + transports.CloudDeployRestInterceptor, "pre_list_deploy_policies" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.CreateReleaseRequest.pb( - cloud_deploy.CreateReleaseRequest() + pb_message = cloud_deploy.ListDeployPoliciesRequest.pb( + cloud_deploy.ListDeployPoliciesRequest() ) transcode.return_value = { "method": "post", @@ -25544,19 +29622,19 @@ def test_create_release_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = cloud_deploy.ListDeployPoliciesResponse.to_json( + cloud_deploy.ListDeployPoliciesResponse() ) - request = cloud_deploy.CreateReleaseRequest() + request = cloud_deploy.ListDeployPoliciesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = cloud_deploy.ListDeployPoliciesResponse() - client.create_release( + client.list_deploy_policies( request, metadata=[ ("key", "val"), @@ -25568,8 +29646,8 @@ def test_create_release_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_release_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.CreateReleaseRequest +def test_list_deploy_policies_rest_bad_request( + transport: str = "rest", 
request_type=cloud_deploy.ListDeployPoliciesRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25577,9 +29655,7 @@ def test_create_release_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -25591,10 +29667,10 @@ def test_create_release_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_release(request) + client.list_deploy_policies(request) -def test_create_release_rest_flattened(): +def test_list_deploy_policies_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -25603,42 +29679,40 @@ def test_create_release_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = cloud_deploy.ListDeployPoliciesResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( parent="parent_value", - release=cloud_deploy.Release(name="name_value"), - release_id="release_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_deploy.ListDeployPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_release(**mock_args) + client.list_deploy_policies(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/deliveryPipelines/*}/releases" + "%s/v1/{parent=projects/*/locations/*}/deployPolicies" % client.transport._host, args[1], ) -def test_create_release_rest_flattened_error(transport: str = "rest"): +def test_list_deploy_policies_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -25647,60 +29721,124 @@ def test_create_release_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_release( - cloud_deploy.CreateReleaseRequest(), + client.list_deploy_policies( + cloud_deploy.ListDeployPoliciesRequest(), parent="parent_value", - release=cloud_deploy.Release(name="name_value"), - release_id="release_id_value", ) -def test_create_release_rest_error(): +def test_list_deploy_policies_rest_pager(transport: str = "rest"): client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), + ], + next_page_token="abc", + ), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[], + next_page_token="def", + ), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListDeployPoliciesResponse( + deploy_policies=[ + cloud_deploy.DeployPolicy(), + cloud_deploy.DeployPolicy(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + cloud_deploy.ListDeployPoliciesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = 
client.list_deploy_policies(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.DeployPolicy) for i in results) + + pages = list(client.list_deploy_policies(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - cloud_deploy.AbandonReleaseRequest, + cloud_deploy.GetDeployPolicyRequest, dict, ], ) -def test_abandon_release_rest(request_type): +def test_get_deploy_policy_rest(request_type): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" - } + request_init = {"name": "projects/sample1/locations/sample2/deployPolicies/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.AbandonReleaseResponse() + return_value = cloud_deploy.DeployPolicy( + name="name_value", + uid="uid_value", + description="description_value", + suspended=True, + etag="etag_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.AbandonReleaseResponse.pb(return_value) + return_value = cloud_deploy.DeployPolicy.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.abandon_release(request) + response = client.get_deploy_policy(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_deploy.AbandonReleaseResponse) + assert isinstance(response, cloud_deploy.DeployPolicy) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.suspended is True + assert response.etag == "etag_value" -def test_abandon_release_rest_use_cached_wrapped_rpc(): +def test_get_deploy_policy_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -25714,30 +29852,32 @@ def test_abandon_release_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.abandon_release in client._transport._wrapped_methods + assert client._transport.get_deploy_policy in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.abandon_release] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_deploy_policy + ] = mock_rpc request = {} - client.abandon_release(request) + client.get_deploy_policy(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.abandon_release(request) + client.get_deploy_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_abandon_release_rest_required_fields( - request_type=cloud_deploy.AbandonReleaseRequest, +def test_get_deploy_policy_rest_required_fields( + request_type=cloud_deploy.GetDeployPolicyRequest, ): transport_class = transports.CloudDeployRestTransport @@ -25753,7 +29893,7 @@ def test_abandon_release_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).abandon_release._get_unset_required_fields(jsonified_request) + ).get_deploy_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -25762,7 +29902,7 @@ def test_abandon_release_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).abandon_release._get_unset_required_fields(jsonified_request) + ).get_deploy_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -25776,7 +29916,7 @@ def test_abandon_release_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_deploy.AbandonReleaseResponse() + return_value = cloud_deploy.DeployPolicy() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -25788,40 +29928,39 @@ def test_abandon_release_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.AbandonReleaseResponse.pb(return_value) + return_value = cloud_deploy.DeployPolicy.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.abandon_release(request) + response = client.get_deploy_policy(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_abandon_release_rest_unset_required_fields(): +def test_get_deploy_policy_rest_unset_required_fields(): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.abandon_release._get_unset_required_fields({}) + unset_fields = transport.get_deploy_policy._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_abandon_release_rest_interceptors(null_interceptor): +def test_get_deploy_policy_rest_interceptors(null_interceptor): transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -25834,14 +29973,14 @@ def test_abandon_release_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudDeployRestInterceptor, 
"post_abandon_release" + transports.CloudDeployRestInterceptor, "post_get_deploy_policy" ) as post, mock.patch.object( - transports.CloudDeployRestInterceptor, "pre_abandon_release" + transports.CloudDeployRestInterceptor, "pre_get_deploy_policy" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_deploy.AbandonReleaseRequest.pb( - cloud_deploy.AbandonReleaseRequest() + pb_message = cloud_deploy.GetDeployPolicyRequest.pb( + cloud_deploy.GetDeployPolicyRequest() ) transcode.return_value = { "method": "post", @@ -25853,19 +29992,19 @@ def test_abandon_release_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_deploy.AbandonReleaseResponse.to_json( - cloud_deploy.AbandonReleaseResponse() + req.return_value._content = cloud_deploy.DeployPolicy.to_json( + cloud_deploy.DeployPolicy() ) - request = cloud_deploy.AbandonReleaseRequest() + request = cloud_deploy.GetDeployPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_deploy.AbandonReleaseResponse() + post.return_value = cloud_deploy.DeployPolicy() - client.abandon_release( + client.get_deploy_policy( request, metadata=[ ("key", "val"), @@ -25877,8 +30016,8 @@ def test_abandon_release_rest_interceptors(null_interceptor): post.assert_called_once() -def test_abandon_release_rest_bad_request( - transport: str = "rest", request_type=cloud_deploy.AbandonReleaseRequest +def test_get_deploy_policy_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.GetDeployPolicyRequest ): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), @@ -25886,9 +30025,7 @@ def test_abandon_release_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": 
"projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" - } + request_init = {"name": "projects/sample1/locations/sample2/deployPolicies/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -25900,10 +30037,10 @@ def test_abandon_release_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.abandon_release(request) + client.get_deploy_policy(request) -def test_abandon_release_rest_flattened(): +def test_get_deploy_policy_rest_flattened(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -25912,11 +30049,11 @@ def test_abandon_release_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = cloud_deploy.AbandonReleaseResponse() + return_value = cloud_deploy.DeployPolicy() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + "name": "projects/sample1/locations/sample2/deployPolicies/sample3" } # get truthy value for each flattened field @@ -25929,25 +30066,25 @@ def test_abandon_release_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = cloud_deploy.AbandonReleaseResponse.pb(return_value) + return_value = cloud_deploy.DeployPolicy.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.abandon_release(**mock_args) + client.get_deploy_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*/releases/*}:abandon" + "%s/v1/{name=projects/*/locations/*/deployPolicies/*}" % client.transport._host, args[1], ) -def test_abandon_release_rest_flattened_error(transport: str = "rest"): +def test_get_deploy_policy_rest_flattened_error(transport: str = "rest"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -25956,13 +30093,13 @@ def test_abandon_release_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.abandon_release( - cloud_deploy.AbandonReleaseRequest(), + client.get_deploy_policy( + cloud_deploy.GetDeployPolicyRequest(), name="name_value", ) -def test_abandon_release_rest_error(): +def test_get_deploy_policy_rest_error(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -27888,6 +32025,7 @@ def test_create_rollout_rest_required_fields( # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( + "override_deploy_policy", "request_id", "rollout_id", "starting_phase_id", @@ -27956,6 +32094,7 @@ def test_create_rollout_rest_unset_required_fields(): assert set(unset_fields) == ( set( ( + "overrideDeployPolicy", "requestId", "rolloutId", "startingPhaseId", @@ -33229,6 +37368,11 @@ def test_cloud_deploy_base_transport(): "get_release", "create_release", "abandon_release", + "create_deploy_policy", + "update_deploy_policy", + "delete_deploy_policy", + "list_deploy_policies", + "get_deploy_policy", "approve_rollout", "advance_rollout", "cancel_rollout", @@ -33595,6 +37739,21 @@ def test_cloud_deploy_client_transport_session_collision(transport_name): session1 = client1.transport.abandon_release._session session2 = client2.transport.abandon_release._session assert session1 != session2 + session1 = client1.transport.create_deploy_policy._session + session2 = client2.transport.create_deploy_policy._session + assert session1 != session2 + session1 = client1.transport.update_deploy_policy._session + session2 = client2.transport.update_deploy_policy._session + assert session1 != session2 + session1 = client1.transport.delete_deploy_policy._session + session2 = client2.transport.delete_deploy_policy._session + assert session1 != session2 + session1 = client1.transport.list_deploy_policies._session + session2 = client2.transport.list_deploy_policies._session + assert session1 != session2 + session1 = client1.transport.get_deploy_policy._session + session2 = client2.transport.get_deploy_policy._session + assert session1 != session2 session1 = client1.transport.approve_rollout._session session2 = client2.transport.approve_rollout._session assert session1 != session2 @@ -34002,10 +38161,38 @@ def test_parse_delivery_pipeline_path(): assert expected == actual -def test_job_path(): +def test_deploy_policy_path(): project = "winkle" location = "nautilus" - job = "scallop" + deploy_policy = "scallop" + expected = ( + 
"projects/{project}/locations/{location}/deployPolicies/{deploy_policy}".format( + project=project, + location=location, + deploy_policy=deploy_policy, + ) + ) + actual = CloudDeployClient.deploy_policy_path(project, location, deploy_policy) + assert expected == actual + + +def test_parse_deploy_policy_path(): + expected = { + "project": "abalone", + "location": "squid", + "deploy_policy": "clam", + } + path = CloudDeployClient.deploy_policy_path(**expected) + + # Check that the path construction is reversible. + actual = CloudDeployClient.parse_deploy_policy_path(path) + assert expected == actual + + +def test_job_path(): + project = "whelk" + location = "octopus" + job = "oyster" expected = "projects/{project}/locations/{location}/jobs/{job}".format( project=project, location=location, @@ -34017,9 +38204,9 @@ def test_job_path(): def test_parse_job_path(): expected = { - "project": "abalone", - "location": "squid", - "job": "clam", + "project": "nudibranch", + "location": "cuttlefish", + "job": "mussel", } path = CloudDeployClient.job_path(**expected) @@ -34029,12 +38216,12 @@ def test_parse_job_path(): def test_job_run_path(): - project = "whelk" - location = "octopus" - delivery_pipeline = "oyster" - release = "nudibranch" - rollout = "cuttlefish" - job_run = "mussel" + project = "winkle" + location = "nautilus" + delivery_pipeline = "scallop" + release = "abalone" + rollout = "squid" + job_run = "clam" expected = "projects/{project}/locations/{location}/deliveryPipelines/{delivery_pipeline}/releases/{release}/rollouts/{rollout}/jobRuns/{job_run}".format( project=project, location=location, @@ -34051,12 +38238,12 @@ def test_job_run_path(): def test_parse_job_run_path(): expected = { - "project": "winkle", - "location": "nautilus", - "delivery_pipeline": "scallop", - "release": "abalone", - "rollout": "squid", - "job_run": "clam", + "project": "whelk", + "location": "octopus", + "delivery_pipeline": "oyster", + "release": "nudibranch", + "rollout": 
"cuttlefish", + "job_run": "mussel", } path = CloudDeployClient.job_run_path(**expected) @@ -34066,9 +38253,9 @@ def test_parse_job_run_path(): def test_membership_path(): - project = "whelk" - location = "octopus" - membership = "oyster" + project = "winkle" + location = "nautilus" + membership = "scallop" expected = ( "projects/{project}/locations/{location}/memberships/{membership}".format( project=project, @@ -34082,9 +38269,9 @@ def test_membership_path(): def test_parse_membership_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "membership": "mussel", + "project": "abalone", + "location": "squid", + "membership": "clam", } path = CloudDeployClient.membership_path(**expected) @@ -34094,10 +38281,10 @@ def test_parse_membership_path(): def test_release_path(): - project = "winkle" - location = "nautilus" - delivery_pipeline = "scallop" - release = "abalone" + project = "whelk" + location = "octopus" + delivery_pipeline = "oyster" + release = "nudibranch" expected = "projects/{project}/locations/{location}/deliveryPipelines/{delivery_pipeline}/releases/{release}".format( project=project, location=location, @@ -34112,10 +38299,10 @@ def test_release_path(): def test_parse_release_path(): expected = { - "project": "squid", - "location": "clam", - "delivery_pipeline": "whelk", - "release": "octopus", + "project": "cuttlefish", + "location": "mussel", + "delivery_pipeline": "winkle", + "release": "nautilus", } path = CloudDeployClient.release_path(**expected) @@ -34125,10 +38312,10 @@ def test_parse_release_path(): def test_repository_path(): - project = "oyster" - location = "nudibranch" - connection = "cuttlefish" - repository = "mussel" + project = "scallop" + location = "abalone" + connection = "squid" + repository = "clam" expected = "projects/{project}/locations/{location}/connections/{connection}/repositories/{repository}".format( project=project, location=location, @@ -34143,10 +38330,10 @@ def test_repository_path(): def 
test_parse_repository_path(): expected = { - "project": "winkle", - "location": "nautilus", - "connection": "scallop", - "repository": "abalone", + "project": "whelk", + "location": "octopus", + "connection": "oyster", + "repository": "nudibranch", } path = CloudDeployClient.repository_path(**expected) @@ -34156,11 +38343,11 @@ def test_parse_repository_path(): def test_rollout_path(): - project = "squid" - location = "clam" - delivery_pipeline = "whelk" - release = "octopus" - rollout = "oyster" + project = "cuttlefish" + location = "mussel" + delivery_pipeline = "winkle" + release = "nautilus" + rollout = "scallop" expected = "projects/{project}/locations/{location}/deliveryPipelines/{delivery_pipeline}/releases/{release}/rollouts/{rollout}".format( project=project, location=location, @@ -34176,11 +38363,11 @@ def test_rollout_path(): def test_parse_rollout_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "delivery_pipeline": "mussel", - "release": "winkle", - "rollout": "nautilus", + "project": "abalone", + "location": "squid", + "delivery_pipeline": "clam", + "release": "whelk", + "rollout": "octopus", } path = CloudDeployClient.rollout_path(**expected) @@ -34190,9 +38377,9 @@ def test_parse_rollout_path(): def test_service_path(): - project = "scallop" - location = "abalone" - service = "squid" + project = "oyster" + location = "nudibranch" + service = "cuttlefish" expected = "projects/{project}/locations/{location}/services/{service}".format( project=project, location=location, @@ -34204,9 +38391,9 @@ def test_service_path(): def test_parse_service_path(): expected = { - "project": "clam", - "location": "whelk", - "service": "octopus", + "project": "mussel", + "location": "winkle", + "service": "nautilus", } path = CloudDeployClient.service_path(**expected) @@ -34216,9 +38403,9 @@ def test_parse_service_path(): def test_target_path(): - project = "oyster" - location = "nudibranch" - target = "cuttlefish" + project = "scallop" + 
location = "abalone" + target = "squid" expected = "projects/{project}/locations/{location}/targets/{target}".format( project=project, location=location, @@ -34230,9 +38417,9 @@ def test_target_path(): def test_parse_target_path(): expected = { - "project": "mussel", - "location": "winkle", - "target": "nautilus", + "project": "clam", + "location": "whelk", + "target": "octopus", } path = CloudDeployClient.target_path(**expected) @@ -34242,9 +38429,9 @@ def test_parse_target_path(): def test_worker_pool_path(): - project = "scallop" - location = "abalone" - worker_pool = "squid" + project = "oyster" + location = "nudibranch" + worker_pool = "cuttlefish" expected = ( "projects/{project}/locations/{location}/workerPools/{worker_pool}".format( project=project, @@ -34258,9 +38445,9 @@ def test_worker_pool_path(): def test_parse_worker_pool_path(): expected = { - "project": "clam", - "location": "whelk", - "worker_pool": "octopus", + "project": "mussel", + "location": "winkle", + "worker_pool": "nautilus", } path = CloudDeployClient.worker_pool_path(**expected) @@ -34270,7 +38457,7 @@ def test_parse_worker_pool_path(): def test_common_billing_account_path(): - billing_account = "oyster" + billing_account = "scallop" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -34280,7 +38467,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nudibranch", + "billing_account": "abalone", } path = CloudDeployClient.common_billing_account_path(**expected) @@ -34290,7 +38477,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "cuttlefish" + folder = "squid" expected = "folders/{folder}".format( folder=folder, ) @@ -34300,7 +38487,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "mussel", + "folder": "clam", } path = CloudDeployClient.common_folder_path(**expected) @@ -34310,7 
+38497,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "winkle" + organization = "whelk" expected = "organizations/{organization}".format( organization=organization, ) @@ -34320,7 +38507,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nautilus", + "organization": "octopus", } path = CloudDeployClient.common_organization_path(**expected) @@ -34330,7 +38517,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "scallop" + project = "oyster" expected = "projects/{project}".format( project=project, ) @@ -34340,7 +38527,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "abalone", + "project": "nudibranch", } path = CloudDeployClient.common_project_path(**expected) @@ -34350,8 +38537,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "squid" - location = "clam" + project = "cuttlefish" + location = "mussel" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -34362,8 +38549,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "whelk", - "location": "octopus", + "project": "winkle", + "location": "nautilus", } path = CloudDeployClient.common_location_path(**expected)