diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/async_client.py index 4dd97d6534ef..23a85093a284 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/async_client.py @@ -17,6 +17,7 @@ import functools import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -216,7 +217,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, CatalogServiceTransport] = "grpc_asyncio", + transport: Optional[ + Union[str, CatalogServiceTransport, Callable[..., CatalogServiceTransport]] + ] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -228,9 +231,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.CatalogServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,CatalogServiceTransport,Callable[..., CatalogServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the CatalogServiceTransport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -355,8 +360,8 @@ async def sample_create_entry_type(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, entry_type, entry_type_id]) if request is not None and has_flattened_params: raise ValueError( @@ -364,7 +369,10 @@ async def sample_create_entry_type(): "the individual field arguments should be set." ) - request = catalog.CreateEntryTypeRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CreateEntryTypeRequest): + request = catalog.CreateEntryTypeRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -377,11 +385,9 @@ async def sample_create_entry_type(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_entry_type, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_entry_type + ] # Certain fields should be provided within the metadata header; # add these here. @@ -481,8 +487,8 @@ async def sample_update_entry_type(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([entry_type, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -490,7 +496,10 @@ async def sample_update_entry_type(): "the individual field arguments should be set." ) - request = catalog.UpdateEntryTypeRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.UpdateEntryTypeRequest): + request = catalog.UpdateEntryTypeRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -501,11 +510,9 @@ async def sample_update_entry_type(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_entry_type, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_entry_type + ] # Certain fields should be provided within the metadata header; # add these here. @@ -611,8 +618,8 @@ async def sample_delete_entry_type(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -620,7 +627,10 @@ async def sample_delete_entry_type(): "the individual field arguments should be set." ) - request = catalog.DeleteEntryTypeRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.DeleteEntryTypeRequest): + request = catalog.DeleteEntryTypeRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -629,11 +639,9 @@ async def sample_delete_entry_type(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_entry_type, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_entry_type + ] # Certain fields should be provided within the metadata header; # add these here. @@ -729,8 +737,8 @@ async def sample_list_entry_types(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -738,7 +746,10 @@ async def sample_list_entry_types(): "the individual field arguments should be set." ) - request = catalog.ListEntryTypesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, catalog.ListEntryTypesRequest): + request = catalog.ListEntryTypesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -747,21 +758,9 @@ async def sample_list_entry_types(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_entry_types, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_entry_types + ] # Certain fields should be provided within the metadata header; # add these here. @@ -852,8 +851,8 @@ async def sample_get_entry_type(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -861,7 +860,10 @@ async def sample_get_entry_type(): "the individual field arguments should be set." ) - request = catalog.GetEntryTypeRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.GetEntryTypeRequest): + request = catalog.GetEntryTypeRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -870,21 +872,9 @@ async def sample_get_entry_type(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_entry_type, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_entry_type + ] # Certain fields should be provided within the metadata header; # add these here. @@ -992,8 +982,8 @@ async def sample_create_aspect_type(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, aspect_type, aspect_type_id]) if request is not None and has_flattened_params: raise ValueError( @@ -1001,7 +991,10 @@ async def sample_create_aspect_type(): "the individual field arguments should be set." ) - request = catalog.CreateAspectTypeRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CreateAspectTypeRequest): + request = catalog.CreateAspectTypeRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
@@ -1014,11 +1007,9 @@ async def sample_create_aspect_type(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_aspect_type, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_aspect_type + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1123,8 +1114,8 @@ async def sample_update_aspect_type(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([aspect_type, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -1132,7 +1123,10 @@ async def sample_update_aspect_type(): "the individual field arguments should be set." ) - request = catalog.UpdateAspectTypeRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.UpdateAspectTypeRequest): + request = catalog.UpdateAspectTypeRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1143,11 +1137,9 @@ async def sample_update_aspect_type(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_aspect_type, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_aspect_type + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1253,8 +1245,8 @@ async def sample_delete_aspect_type(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1262,7 +1254,10 @@ async def sample_delete_aspect_type(): "the individual field arguments should be set." ) - request = catalog.DeleteAspectTypeRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.DeleteAspectTypeRequest): + request = catalog.DeleteAspectTypeRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1271,11 +1266,9 @@ async def sample_delete_aspect_type(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_aspect_type, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_aspect_type + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1371,8 +1364,8 @@ async def sample_list_aspect_types(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -1380,7 +1373,10 @@ async def sample_list_aspect_types(): "the individual field arguments should be set." ) - request = catalog.ListAspectTypesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.ListAspectTypesRequest): + request = catalog.ListAspectTypesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1389,21 +1385,9 @@ async def sample_list_aspect_types(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_aspect_types, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_aspect_types + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1496,8 +1480,8 @@ async def sample_get_aspect_type(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1505,7 +1489,10 @@ async def sample_get_aspect_type(): "the individual field arguments should be set." ) - request = catalog.GetAspectTypeRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.GetAspectTypeRequest): + request = catalog.GetAspectTypeRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1514,21 +1501,9 @@ async def sample_get_aspect_type(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_aspect_type, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_aspect_type + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1632,8 +1607,8 @@ async def sample_create_entry_group(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, entry_group, entry_group_id]) if request is not None and has_flattened_params: raise ValueError( @@ -1641,7 +1616,10 @@ async def sample_create_entry_group(): "the individual field arguments should be set." ) - request = catalog.CreateEntryGroupRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CreateEntryGroupRequest): + request = catalog.CreateEntryGroupRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1654,11 +1632,9 @@ async def sample_create_entry_group(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_entry_group, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_entry_group + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1759,8 +1735,8 @@ async def sample_update_entry_group(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([entry_group, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -1768,7 +1744,10 @@ async def sample_update_entry_group(): "the individual field arguments should be set." ) - request = catalog.UpdateEntryGroupRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.UpdateEntryGroupRequest): + request = catalog.UpdateEntryGroupRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1779,11 +1758,9 @@ async def sample_update_entry_group(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_entry_group, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_entry_group + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1889,8 +1866,8 @@ async def sample_delete_entry_group(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1898,7 +1875,10 @@ async def sample_delete_entry_group(): "the individual field arguments should be set." ) - request = catalog.DeleteEntryGroupRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, catalog.DeleteEntryGroupRequest): + request = catalog.DeleteEntryGroupRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1907,11 +1887,9 @@ async def sample_delete_entry_group(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_entry_group, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_entry_group + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2007,8 +1985,8 @@ async def sample_list_entry_groups(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -2016,7 +1994,10 @@ async def sample_list_entry_groups(): "the individual field arguments should be set." ) - request = catalog.ListEntryGroupsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.ListEntryGroupsRequest): + request = catalog.ListEntryGroupsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2025,21 +2006,9 @@ async def sample_list_entry_groups(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_entry_groups, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_entry_groups + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2130,8 +2099,8 @@ async def sample_get_entry_group(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -2139,7 +2108,10 @@ async def sample_get_entry_group(): "the individual field arguments should be set." ) - request = catalog.GetEntryGroupRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.GetEntryGroupRequest): + request = catalog.GetEntryGroupRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2148,21 +2120,9 @@ async def sample_get_entry_group(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_entry_group, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_entry_group + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2283,8 +2243,8 @@ async def sample_create_entry(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, entry, entry_id]) if request is not None and has_flattened_params: raise ValueError( @@ -2292,7 +2252,10 @@ async def sample_create_entry(): "the individual field arguments should be set." ) - request = catalog.CreateEntryRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CreateEntryRequest): + request = catalog.CreateEntryRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2305,11 +2268,9 @@ async def sample_create_entry(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_entry, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_entry + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2404,8 +2365,8 @@ async def sample_update_entry(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([entry, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -2413,7 +2374,10 @@ async def sample_update_entry(): "the individual field arguments should be set." ) - request = catalog.UpdateEntryRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.UpdateEntryRequest): + request = catalog.UpdateEntryRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2424,21 +2388,9 @@ async def sample_update_entry(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_entry, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_entry + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2523,8 +2475,8 @@ async def sample_delete_entry(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -2532,7 +2484,10 @@ async def sample_delete_entry(): "the individual field arguments should be set." ) - request = catalog.DeleteEntryRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.DeleteEntryRequest): + request = catalog.DeleteEntryRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2541,11 +2496,9 @@ async def sample_delete_entry(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_entry, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_entry + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2629,8 +2582,8 @@ async def sample_list_entries(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -2638,7 +2591,10 @@ async def sample_list_entries(): "the individual field arguments should be set." ) - request = catalog.ListEntriesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.ListEntriesRequest): + request = catalog.ListEntriesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2647,21 +2603,9 @@ async def sample_list_entries(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_entries, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_entries + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2753,8 +2697,8 @@ async def sample_get_entry(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -2762,7 +2706,10 @@ async def sample_get_entry(): "the individual field arguments should be set." ) - request = catalog.GetEntryRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.GetEntryRequest): + request = catalog.GetEntryRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2771,21 +2718,9 @@ async def sample_get_entry(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_entry, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_entry + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2861,25 +2796,16 @@ async def sample_lookup_entry(): """ # Create or coerce a protobuf request object. - request = catalog.LookupEntryRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.LookupEntryRequest): + request = catalog.LookupEntryRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.lookup_entry, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.lookup_entry + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2973,8 +2899,8 @@ async def sample_search_entries(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([name, query]) if request is not None and has_flattened_params: raise ValueError( @@ -2982,7 +2908,10 @@ async def sample_search_entries(): "the individual field arguments should be set." ) - request = catalog.SearchEntriesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.SearchEntriesRequest): + request = catalog.SearchEntriesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2993,21 +2922,9 @@ async def sample_search_entries(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.search_entries, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.search_entries + ] # Certain fields should be provided within the metadata header; # add these here. diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py index 607951e936c4..d37e69681f07 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py @@ -17,6 +17,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -615,7 +616,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, CatalogServiceTransport]] = None, + transport: Optional[ + Union[str, CatalogServiceTransport, Callable[..., CatalogServiceTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -627,9 +630,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, CatalogServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,CatalogServiceTransport,Callable[..., CatalogServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the CatalogServiceTransport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. 
@@ -738,8 +743,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[CatalogServiceTransport], Callable[..., CatalogServiceTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., CatalogServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -832,8 +844,8 @@ def sample_create_entry_type(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, entry_type, entry_type_id]) if request is not None and has_flattened_params: raise ValueError( @@ -841,10 +853,8 @@ def sample_create_entry_type(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a catalog.CreateEntryTypeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, catalog.CreateEntryTypeRequest): request = catalog.CreateEntryTypeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -958,8 +968,8 @@ def sample_update_entry_type(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([entry_type, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -967,10 +977,8 @@ def sample_update_entry_type(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a catalog.UpdateEntryTypeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, catalog.UpdateEntryTypeRequest): request = catalog.UpdateEntryTypeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1088,8 +1096,8 @@ def sample_delete_entry_type(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1097,10 +1105,8 @@ def sample_delete_entry_type(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a catalog.DeleteEntryTypeRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, catalog.DeleteEntryTypeRequest): request = catalog.DeleteEntryTypeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1206,8 +1212,8 @@ def sample_list_entry_types(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -1215,10 +1221,8 @@ def sample_list_entry_types(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a catalog.ListEntryTypesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, catalog.ListEntryTypesRequest): request = catalog.ListEntryTypesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1319,8 +1323,8 @@ def sample_get_entry_type(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1328,10 +1332,8 @@ def sample_get_entry_type(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a catalog.GetEntryTypeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, catalog.GetEntryTypeRequest): request = catalog.GetEntryTypeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1449,8 +1451,8 @@ def sample_create_aspect_type(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, aspect_type, aspect_type_id]) if request is not None and has_flattened_params: raise ValueError( @@ -1458,10 +1460,8 @@ def sample_create_aspect_type(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a catalog.CreateAspectTypeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, catalog.CreateAspectTypeRequest): request = catalog.CreateAspectTypeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1580,8 +1580,8 @@ def sample_update_aspect_type(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([aspect_type, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -1589,10 +1589,8 @@ def sample_update_aspect_type(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a catalog.UpdateAspectTypeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, catalog.UpdateAspectTypeRequest): request = catalog.UpdateAspectTypeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1710,8 +1708,8 @@ def sample_delete_aspect_type(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1719,10 +1717,8 @@ def sample_delete_aspect_type(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a catalog.DeleteAspectTypeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, catalog.DeleteAspectTypeRequest): request = catalog.DeleteAspectTypeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1828,8 +1824,8 @@ def sample_list_aspect_types(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -1837,10 +1833,8 @@ def sample_list_aspect_types(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a catalog.ListAspectTypesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, catalog.ListAspectTypesRequest): request = catalog.ListAspectTypesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1943,8 +1937,8 @@ def sample_get_aspect_type(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1952,10 +1946,8 @@ def sample_get_aspect_type(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a catalog.GetAspectTypeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, catalog.GetAspectTypeRequest): request = catalog.GetAspectTypeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2069,8 +2061,8 @@ def sample_create_entry_group(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, entry_group, entry_group_id]) if request is not None and has_flattened_params: raise ValueError( @@ -2078,10 +2070,8 @@ def sample_create_entry_group(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a catalog.CreateEntryGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, catalog.CreateEntryGroupRequest): request = catalog.CreateEntryGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2196,8 +2186,8 @@ def sample_update_entry_group(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([entry_group, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -2205,10 +2195,8 @@ def sample_update_entry_group(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a catalog.UpdateEntryGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, catalog.UpdateEntryGroupRequest): request = catalog.UpdateEntryGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2326,8 +2314,8 @@ def sample_delete_entry_group(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -2335,10 +2323,8 @@ def sample_delete_entry_group(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a catalog.DeleteEntryGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, catalog.DeleteEntryGroupRequest): request = catalog.DeleteEntryGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2444,8 +2430,8 @@ def sample_list_entry_groups(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -2453,10 +2439,8 @@ def sample_list_entry_groups(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a catalog.ListEntryGroupsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, catalog.ListEntryGroupsRequest): request = catalog.ListEntryGroupsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2557,8 +2541,8 @@ def sample_get_entry_group(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -2566,10 +2550,8 @@ def sample_get_entry_group(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a catalog.GetEntryGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, catalog.GetEntryGroupRequest): request = catalog.GetEntryGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2700,8 +2682,8 @@ def sample_create_entry(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, entry, entry_id]) if request is not None and has_flattened_params: raise ValueError( @@ -2709,10 +2691,8 @@ def sample_create_entry(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a catalog.CreateEntryRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, catalog.CreateEntryRequest): request = catalog.CreateEntryRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2821,8 +2801,8 @@ def sample_update_entry(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([entry, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -2830,10 +2810,8 @@ def sample_update_entry(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a catalog.UpdateEntryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, catalog.UpdateEntryRequest): request = catalog.UpdateEntryRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2930,8 +2908,8 @@ def sample_delete_entry(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -2939,10 +2917,8 @@ def sample_delete_entry(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a catalog.DeleteEntryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, catalog.DeleteEntryRequest): request = catalog.DeleteEntryRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3036,8 +3012,8 @@ def sample_list_entries(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -3045,10 +3021,8 @@ def sample_list_entries(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a catalog.ListEntriesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, catalog.ListEntriesRequest): request = catalog.ListEntriesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3150,8 +3124,8 @@ def sample_get_entry(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -3159,10 +3133,8 @@ def sample_get_entry(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a catalog.GetEntryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, catalog.GetEntryRequest): request = catalog.GetEntryRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3248,10 +3220,8 @@ def sample_lookup_entry(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a catalog.LookupEntryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, catalog.LookupEntryRequest): request = catalog.LookupEntryRequest(request) @@ -3351,8 +3321,8 @@ def sample_search_entries(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, query]) if request is not None and has_flattened_params: raise ValueError( @@ -3360,10 +3330,8 @@ def sample_search_entries(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a catalog.SearchEntriesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, catalog.SearchEntriesRequest): request = catalog.SearchEntriesRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py index ca40465a92d4..f7c406260853 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py @@ -58,7 +58,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -78,14 +78,17 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -95,11 +98,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -126,7 +129,7 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. 
credentials = False # If a channel was explicitly provided, set it. @@ -167,7 +170,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py index 2a952c2c1846..76e1665f5289 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py @@ -16,7 +16,9 @@ from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union import warnings +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.cloud.location import locations_pb2 # type: ignore @@ -73,7 +75,6 @@ def create_channel( the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -103,7 +104,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -123,15 +124,18 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. 
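Both the synchronous and asyncio transports now accept this callable form for ``channel``. A hedged sketch of supplying a factory to the synchronous transport (the factory name is illustrative, and Application Default Credentials are assumed to be available):

from google.cloud.dataplex_v1.services.catalog_service.transports import (
    CatalogServiceGrpcTransport,
)

def channel_factory(host, **kwargs):
    # Invoked by the transport with the same arguments it would pass to
    # create_channel (credentials, scopes, ssl_credentials, options, ...);
    # delegate here and customize, e.g. to add interceptors, as needed.
    return CatalogServiceGrpcTransport.create_channel(host, **kwargs)

transport = CatalogServiceGrpcTransport(channel=channel_factory)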
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -141,11 +145,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -172,7 +176,7 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. credentials = False # If a channel was explicitly provided, set it. @@ -212,7 +216,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, @@ -855,6 +861,231 @@ def search_entries( ) return self._stubs["search_entries"] + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_entry_type: gapic_v1.method_async.wrap_method( + self.create_entry_type, + default_timeout=60.0, + client_info=client_info, + ), + self.update_entry_type: gapic_v1.method_async.wrap_method( + self.update_entry_type, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_entry_type: gapic_v1.method_async.wrap_method( + self.delete_entry_type, + default_timeout=60.0, + client_info=client_info, + ), + self.list_entry_types: gapic_v1.method_async.wrap_method( + self.list_entry_types, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_entry_type: gapic_v1.method_async.wrap_method( + self.get_entry_type, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_aspect_type: gapic_v1.method_async.wrap_method( + self.create_aspect_type, + default_timeout=60.0, + client_info=client_info, + ), + self.update_aspect_type: gapic_v1.method_async.wrap_method( + self.update_aspect_type, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_aspect_type: gapic_v1.method_async.wrap_method( + self.delete_aspect_type, + default_timeout=60.0, + client_info=client_info, + ), + 
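The _prep_wrapped_messages override begun above (its remaining entries continue below) keys the table by the transport's own method objects, so a client can fetch the pre-wrapped, retry- and timeout-aware callable once instead of wrapping on every call. A minimal standalone sketch of that lookup pattern; the class and method names here are hypothetical:

from google.api_core import gapic_v1

class FakeTransport:
    def echo(self, request):
        # Stands in for a gRPC stub callable.
        return request

    def _prep_wrapped_messages(self, client_info):
        # Key the table by the method object itself, as the generated
        # transports now do.
        self._wrapped_methods = {
            self.echo: gapic_v1.method.wrap_method(
                self.echo,
                default_timeout=60.0,
                client_info=client_info,
            ),
        }

transport = FakeTransport()
transport._prep_wrapped_messages(gapic_v1.client_info.ClientInfo())
rpc = transport._wrapped_methods[transport.echo]  # defaults already applied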
self.list_aspect_types: gapic_v1.method_async.wrap_method( + self.list_aspect_types, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_aspect_type: gapic_v1.method_async.wrap_method( + self.get_aspect_type, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_entry_group: gapic_v1.method_async.wrap_method( + self.create_entry_group, + default_timeout=60.0, + client_info=client_info, + ), + self.update_entry_group: gapic_v1.method_async.wrap_method( + self.update_entry_group, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_entry_group: gapic_v1.method_async.wrap_method( + self.delete_entry_group, + default_timeout=60.0, + client_info=client_info, + ), + self.list_entry_groups: gapic_v1.method_async.wrap_method( + self.list_entry_groups, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_entry_group: gapic_v1.method_async.wrap_method( + self.get_entry_group, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_entry: gapic_v1.method_async.wrap_method( + self.create_entry, + default_timeout=60.0, + client_info=client_info, + ), + self.update_entry: gapic_v1.method_async.wrap_method( + self.update_entry, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_entry: gapic_v1.method_async.wrap_method( + self.delete_entry, + default_timeout=60.0, + client_info=client_info, + ), + self.list_entries: gapic_v1.method_async.wrap_method( + self.list_entries, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.get_entry: gapic_v1.method_async.wrap_method( + self.get_entry, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.lookup_entry: gapic_v1.method_async.wrap_method( + self.lookup_entry, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + 
default_timeout=20.0, + client_info=client_info, + ), + self.search_entries: gapic_v1.method_async.wrap_method( + self.search_entries, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/async_client.py index 5c810bafe89e..d00066d534e2 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/async_client.py @@ -17,6 +17,7 @@ import functools import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -205,7 +206,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, ContentServiceTransport] = "grpc_asyncio", + transport: Optional[ + Union[str, ContentServiceTransport, Callable[..., ContentServiceTransport]] + ] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -217,9 +220,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.ContentServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,ContentServiceTransport,Callable[..., ContentServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ContentServiceTransport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -335,8 +340,8 @@ async def sample_create_content(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, content]) if request is not None and has_flattened_params: raise ValueError( @@ -344,7 +349,10 @@ async def sample_create_content(): "the individual field arguments should be set." ) - request = gcd_content.CreateContentRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcd_content.CreateContentRequest): + request = gcd_content.CreateContentRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -355,11 +363,9 @@ async def sample_create_content(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
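Those retry and timeout defaults now come from the transport table shown earlier rather than from an inline wrap_method call (the replacement follows below). For reference, the AsyncRetry policies registered in that table are plain exponential backoff; an equivalent standalone construction:

from google.api_core import exceptions as core_exceptions
from google.api_core import retry_async as retries

# Wait about 1s before the first retry, grow the wait by 1.3x per attempt, cap
# it at 10s, and stop retrying once 60s have elapsed overall. Only
# ResourceExhausted and ServiceUnavailable errors are retried.
policy = retries.AsyncRetry(
    initial=1.0,
    maximum=10.0,
    multiplier=1.3,
    predicate=retries.if_exception_type(
        core_exceptions.ResourceExhausted,
        core_exceptions.ServiceUnavailable,
    ),
    deadline=60.0,
)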
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_content, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_content + ] # Certain fields should be provided within the metadata header; # add these here. @@ -452,8 +458,8 @@ async def sample_update_content(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([content, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -461,7 +467,10 @@ async def sample_update_content(): "the individual field arguments should be set." ) - request = gcd_content.UpdateContentRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcd_content.UpdateContentRequest): + request = gcd_content.UpdateContentRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -472,11 +481,9 @@ async def sample_update_content(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_content, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_content + ] # Certain fields should be provided within the metadata header; # add these here. @@ -551,8 +558,8 @@ async def sample_delete_content(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -560,7 +567,10 @@ async def sample_delete_content(): "the individual field arguments should be set." ) - request = content.DeleteContentRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, content.DeleteContentRequest): + request = content.DeleteContentRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -569,11 +579,9 @@ async def sample_delete_content(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_content, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_content + ] # Certain fields should be provided within the metadata header; # add these here. @@ -652,8 +660,8 @@ async def sample_get_content(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -661,7 +669,10 @@ async def sample_get_content(): "the individual field arguments should be set." ) - request = content.GetContentRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, content.GetContentRequest): + request = content.GetContentRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -670,20 +681,9 @@ async def sample_get_content(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_content, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_content + ] # Certain fields should be provided within the metadata header; # add these here. @@ -802,8 +802,8 @@ async def sample_get_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([resource]) if request is not None and has_flattened_params: raise ValueError( @@ -811,8 +811,8 @@ async def sample_get_iam_policy(): "the individual field arguments should be set." ) - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. if isinstance(request, dict): request = iam_policy_pb2.GetIamPolicyRequest(**request) elif not request: @@ -820,20 +820,9 @@ async def sample_get_iam_policy(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_iam_policy, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_iam_policy + ] # Certain fields should be provided within the metadata header; # add these here. @@ -940,8 +929,8 @@ async def sample_set_iam_policy(): """ # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. if isinstance(request, dict): request = iam_policy_pb2.SetIamPolicyRequest(**request) elif not request: @@ -949,11 +938,9 @@ async def sample_set_iam_policy(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.set_iam_policy, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.set_iam_policy + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1037,8 +1024,8 @@ async def sample_test_iam_permissions(): Response message for TestIamPermissions method. """ # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. if isinstance(request, dict): request = iam_policy_pb2.TestIamPermissionsRequest(**request) elif not request: @@ -1046,20 +1033,9 @@ async def sample_test_iam_permissions(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.test_iam_permissions, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.test_iam_permissions + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1146,8 +1122,8 @@ async def sample_list_content(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -1155,7 +1131,10 @@ async def sample_list_content(): "the individual field arguments should be set." ) - request = content.ListContentRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, content.ListContentRequest): + request = content.ListContentRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1164,20 +1143,9 @@ async def sample_list_content(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_content, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_content + ] # Certain fields should be provided within the metadata header; # add these here. 
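Even with the defaults precomputed on the transport, per-call retry and timeout arguments still take precedence. A hedged usage sketch for the async content client (project, location, and lake names are placeholders; Application Default Credentials are assumed):

import asyncio

from google.api_core import retry_async as retries
from google.cloud import dataplex_v1

async def list_lake_content():
    client = dataplex_v1.ContentServiceAsyncClient()
    pager = await client.list_content(
        parent="projects/my-project/locations/us-central1/lakes/my-lake",
        # Per-call overrides win over the transport's wrapped-method defaults.
        retry=retries.AsyncRetry(initial=1.0, maximum=10.0, multiplier=1.3, deadline=60.0),
        timeout=60.0,
    )
    async for item in pager:
        print(item.name)

asyncio.run(list_lake_content())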
diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py index 2f8cd3d12f20..b15c0f636bce 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py @@ -17,6 +17,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -560,7 +561,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ContentServiceTransport]] = None, + transport: Optional[ + Union[str, ContentServiceTransport, Callable[..., ContentServiceTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -572,9 +575,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ContentServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,ContentServiceTransport,Callable[..., ContentServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ContentServiceTransport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -683,8 +688,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[ContentServiceTransport], Callable[..., ContentServiceTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ContentServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -768,8 +780,8 @@ def sample_create_content(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, content]) if request is not None and has_flattened_params: raise ValueError( @@ -777,10 +789,8 @@ def sample_create_content(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a gcd_content.CreateContentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
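Returning to the constructor change earlier in this file: because ``transport`` may now be a callable, the client can be handed a transport factory, which ``transport_init`` calls with the usual keyword arguments. A hedged sketch (Application Default Credentials assumed; the factory is illustrative), after which the create_content request handling resumes:

from google.cloud import dataplex_v1
from google.cloud.dataplex_v1.services.content_service.transports import (
    ContentServiceGrpcTransport,
)

def transport_factory(**kwargs):
    # Receives the same initialization arguments the client would pass to a
    # ContentServiceTransport subclass (credentials, host, client_info, ...).
    return ContentServiceGrpcTransport(**kwargs)

client = dataplex_v1.ContentServiceClient(transport=transport_factory)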
if not isinstance(request, gcd_content.CreateContentRequest): request = gcd_content.CreateContentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -885,8 +895,8 @@ def sample_update_content(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([content, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -894,10 +904,8 @@ def sample_update_content(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a gcd_content.UpdateContentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, gcd_content.UpdateContentRequest): request = gcd_content.UpdateContentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -984,8 +992,8 @@ def sample_delete_content(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -993,10 +1001,8 @@ def sample_delete_content(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a content.DeleteContentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, content.DeleteContentRequest): request = content.DeleteContentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1085,8 +1091,8 @@ def sample_get_content(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1094,10 +1100,8 @@ def sample_get_content(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a content.GetContentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, content.GetContentRequest): request = content.GetContentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1226,8 +1230,8 @@ def sample_get_iam_policy(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1236,8 +1240,8 @@ def sample_get_iam_policy(): ) if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. request = iam_policy_pb2.GetIamPolicyRequest(**request) elif not request: # Null request, just make one. @@ -1355,8 +1359,8 @@ def sample_set_iam_policy(): """ # Create or coerce a protobuf request object. if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. request = iam_policy_pb2.SetIamPolicyRequest(**request) elif not request: # Null request, just make one. @@ -1449,8 +1453,8 @@ def sample_test_iam_permissions(): """ # Create or coerce a protobuf request object. if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. request = iam_policy_pb2.TestIamPermissionsRequest(**request) elif not request: # Null request, just make one. @@ -1545,8 +1549,8 @@ def sample_list_content(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -1554,10 +1558,8 @@ def sample_list_content(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a content.ListContentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
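The IAM requests handled above are raw protobuf messages rather than proto-plus types, which is why a dict has to be expanded into keyword arguments instead of being passed positionally. For example (the resource name is a placeholder):

from google.iam.v1 import iam_policy_pb2  # type: ignore

# Plain protobuf constructors accept only keyword arguments, so the generated
# code expands a dict request with ** rather than passing it positionally.
request = iam_policy_pb2.GetIamPolicyRequest(
    **{"resource": "projects/my-project/locations/us-central1/lakes/my-lake/content/my-notebook"}
)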
if not isinstance(request, content.ListContentRequest): request = content.ListContentRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc.py index e6f7f9f46480..741e4c5a7513 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc.py @@ -56,7 +56,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -76,14 +76,17 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -93,11 +96,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -123,7 +126,7 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. 
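The switch from a truthiness test to isinstance matters because a channel factory is also truthy; only a ready-made channel should cause credential setup to be skipped. A tiny illustration of the distinction (the factory is hypothetical), after which the credential-skipping branch continues:

import grpc

def is_ready_made_channel(obj):
    # `if obj:` cannot tell a factory apart from a real channel, since both
    # are truthy; the isinstance check can.
    return isinstance(obj, grpc.Channel)

factory = lambda host, **kwargs: grpc.insecure_channel(host)
assert bool(factory) and not is_ready_made_channel(factory)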
credentials = False # If a channel was explicitly provided, set it. @@ -164,7 +167,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py index 04dcc2bd3e49..1f9f3080b75b 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py @@ -16,7 +16,9 @@ from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union import warnings +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.cloud.location import locations_pb2 # type: ignore @@ -71,7 +73,6 @@ def create_channel( the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -101,7 +102,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -121,15 +122,18 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. 
The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -139,11 +143,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -169,7 +173,7 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. credentials = False # If a channel was explicitly provided, set it. @@ -209,7 +213,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, @@ -469,6 +475,87 @@ def list_content( ) return self._stubs["list_content"] + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_content: gapic_v1.method_async.wrap_method( + self.create_content, + default_timeout=60.0, + client_info=client_info, + ), + self.update_content: gapic_v1.method_async.wrap_method( + self.update_content, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_content: gapic_v1.method_async.wrap_method( + self.delete_content, + default_timeout=60.0, + client_info=client_info, + ), + self.get_content: gapic_v1.method_async.wrap_method( + self.get_content, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method_async.wrap_method( + self.get_iam_policy, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method_async.wrap_method( + self.set_iam_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method_async.wrap_method( + self.test_iam_permissions, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_content: gapic_v1.method_async.wrap_method( + self.list_content, + default_retry=retries.AsyncRetry( + initial=1.0, + 
maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py index 8b82e7780686..93c9d2b3f067 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py @@ -17,6 +17,7 @@ import functools import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -221,7 +222,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, DataScanServiceTransport] = "grpc_asyncio", + transport: Optional[ + Union[ + str, DataScanServiceTransport, Callable[..., DataScanServiceTransport] + ] + ] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -233,9 +238,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.DataScanServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,DataScanServiceTransport,Callable[..., DataScanServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DataScanServiceTransport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -384,8 +391,8 @@ async def sample_create_data_scan(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, data_scan, data_scan_id]) if request is not None and has_flattened_params: raise ValueError( @@ -393,7 +400,10 @@ async def sample_create_data_scan(): "the individual field arguments should be set." ) - request = datascans.CreateDataScanRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.CreateDataScanRequest): + request = datascans.CreateDataScanRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -406,11 +416,9 @@ async def sample_create_data_scan(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_data_scan, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_data_scan + ] # Certain fields should be provided within the metadata header; # add these here. 
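The async client accepts the same callable form for ``transport``. A hedged sketch that wires the async gRPC transport explicitly and lists scans (Application Default Credentials assumed; project and location are placeholders):

import asyncio

from google.cloud import dataplex_v1
from google.cloud.dataplex_v1.services.data_scan_service.transports import (
    DataScanServiceGrpcAsyncIOTransport,
)

async def list_scans():
    # The callable is invoked with the same initialization arguments as the
    # DataScanServiceTransport constructor.
    client = dataplex_v1.DataScanServiceAsyncClient(
        transport=lambda **kwargs: DataScanServiceGrpcAsyncIOTransport(**kwargs),
    )
    pager = await client.list_data_scans(parent="projects/my-project/locations/us-central1")
    async for scan in pager:
        print(scan.name)

asyncio.run(list_scans())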
@@ -527,8 +535,8 @@ async def sample_update_data_scan(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([data_scan, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -536,7 +544,10 @@ async def sample_update_data_scan(): "the individual field arguments should be set." ) - request = datascans.UpdateDataScanRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.UpdateDataScanRequest): + request = datascans.UpdateDataScanRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -547,11 +558,9 @@ async def sample_update_data_scan(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_data_scan, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_data_scan + ] # Certain fields should be provided within the metadata header; # add these here. @@ -660,8 +669,8 @@ async def sample_delete_data_scan(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -669,7 +678,10 @@ async def sample_delete_data_scan(): "the individual field arguments should be set." ) - request = datascans.DeleteDataScanRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.DeleteDataScanRequest): + request = datascans.DeleteDataScanRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -678,11 +690,9 @@ async def sample_delete_data_scan(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_data_scan, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_data_scan + ] # Certain fields should be provided within the metadata header; # add these here. @@ -785,8 +795,8 @@ async def sample_get_data_scan(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -794,7 +804,10 @@ async def sample_get_data_scan(): "the individual field arguments should be set." 
) - request = datascans.GetDataScanRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.GetDataScanRequest): + request = datascans.GetDataScanRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -803,11 +816,9 @@ async def sample_get_data_scan(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_data_scan, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_data_scan + ] # Certain fields should be provided within the metadata header; # add these here. @@ -895,8 +906,8 @@ async def sample_list_data_scans(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -904,7 +915,10 @@ async def sample_list_data_scans(): "the individual field arguments should be set." ) - request = datascans.ListDataScansRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.ListDataScansRequest): + request = datascans.ListDataScansRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -913,11 +927,9 @@ async def sample_list_data_scans(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_data_scans, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_data_scans + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1011,8 +1023,8 @@ async def sample_run_data_scan(): Run DataScan Response. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1020,7 +1032,10 @@ async def sample_run_data_scan(): "the individual field arguments should be set." ) - request = datascans.RunDataScanRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.RunDataScanRequest): + request = datascans.RunDataScanRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1029,11 +1044,9 @@ async def sample_run_data_scan(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.run_data_scan, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.run_data_scan + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1118,8 +1131,8 @@ async def sample_get_data_scan_job(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1127,7 +1140,10 @@ async def sample_get_data_scan_job(): "the individual field arguments should be set." ) - request = datascans.GetDataScanJobRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.GetDataScanJobRequest): + request = datascans.GetDataScanJobRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1136,11 +1152,9 @@ async def sample_get_data_scan_job(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_data_scan_job, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_data_scan_job + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1229,8 +1243,8 @@ async def sample_list_data_scan_jobs(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -1238,7 +1252,10 @@ async def sample_list_data_scan_jobs(): "the individual field arguments should be set." ) - request = datascans.ListDataScanJobsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.ListDataScanJobsRequest): + request = datascans.ListDataScanJobsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1247,11 +1264,9 @@ async def sample_list_data_scan_jobs(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_data_scan_jobs, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_data_scan_jobs + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1350,8 +1365,8 @@ async def sample_generate_data_quality_rules(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1359,7 +1374,10 @@ async def sample_generate_data_quality_rules(): "the individual field arguments should be set." ) - request = datascans.GenerateDataQualityRulesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.GenerateDataQualityRulesRequest): + request = datascans.GenerateDataQualityRulesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1368,11 +1386,9 @@ async def sample_generate_data_quality_rules(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.generate_data_quality_rules, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.generate_data_quality_rules + ] # Certain fields should be provided within the metadata header; # add these here. diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py index f1d2f547258a..ba6d09e94b59 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py @@ -17,6 +17,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -598,7 +599,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DataScanServiceTransport]] = None, + transport: Optional[ + Union[ + str, DataScanServiceTransport, Callable[..., DataScanServiceTransport] + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -610,9 +615,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, DataScanServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,DataScanServiceTransport,Callable[..., DataScanServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DataScanServiceTransport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. 
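The synchronous DataScanServiceClient documents the same Callable option, and the hunk that follows shows how the client dispatches on it. A short caller-side sketch under the same assumptions as the async example above (the lambda is illustrative):

    from google.cloud.dataplex_v1 import DataScanServiceClient
    from google.cloud.dataplex_v1.services.data_scan_service.transports import (
        DataScanServiceGrpcTransport,
    )

    # The callable is invoked with the same keyword arguments the client would pass
    # to the DataScanServiceGrpcTransport constructor.
    client = DataScanServiceClient(
        transport=lambda **kwargs: DataScanServiceGrpcTransport(**kwargs)
    )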
@@ -721,8 +728,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[DataScanServiceTransport], Callable[..., DataScanServiceTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., DataScanServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -839,8 +853,8 @@ def sample_create_data_scan(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, data_scan, data_scan_id]) if request is not None and has_flattened_params: raise ValueError( @@ -848,10 +862,8 @@ def sample_create_data_scan(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a datascans.CreateDataScanRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, datascans.CreateDataScanRequest): request = datascans.CreateDataScanRequest(request) # If we have keyword arguments corresponding to fields on the @@ -982,8 +994,8 @@ def sample_update_data_scan(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([data_scan, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -991,10 +1003,8 @@ def sample_update_data_scan(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a datascans.UpdateDataScanRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, datascans.UpdateDataScanRequest): request = datascans.UpdateDataScanRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1115,8 +1125,8 @@ def sample_delete_data_scan(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1124,10 +1134,8 @@ def sample_delete_data_scan(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a datascans.DeleteDataScanRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, datascans.DeleteDataScanRequest): request = datascans.DeleteDataScanRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1240,8 +1248,8 @@ def sample_get_data_scan(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1249,10 +1257,8 @@ def sample_get_data_scan(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a datascans.GetDataScanRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, datascans.GetDataScanRequest): request = datascans.GetDataScanRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1350,8 +1356,8 @@ def sample_list_data_scans(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -1359,10 +1365,8 @@ def sample_list_data_scans(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a datascans.ListDataScansRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, datascans.ListDataScansRequest): request = datascans.ListDataScansRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1466,8 +1470,8 @@ def sample_run_data_scan(): Run DataScan Response. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1475,10 +1479,8 @@ def sample_run_data_scan(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a datascans.RunDataScanRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, datascans.RunDataScanRequest): request = datascans.RunDataScanRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1573,8 +1575,8 @@ def sample_get_data_scan_job(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1582,10 +1584,8 @@ def sample_get_data_scan_job(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a datascans.GetDataScanJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, datascans.GetDataScanJobRequest): request = datascans.GetDataScanJobRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1684,8 +1684,8 @@ def sample_list_data_scan_jobs(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -1693,10 +1693,8 @@ def sample_list_data_scan_jobs(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a datascans.ListDataScanJobsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, datascans.ListDataScanJobsRequest): request = datascans.ListDataScanJobsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1805,8 +1803,8 @@ def sample_generate_data_quality_rules(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1814,10 +1812,8 @@ def sample_generate_data_quality_rules(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a datascans.GenerateDataQualityRulesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, datascans.GenerateDataQualityRulesRequest): request = datascans.GenerateDataQualityRulesRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc.py index d871c9da8750..d6a031505882 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc.py @@ -56,7 +56,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -76,14 +76,17 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -93,11 +96,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
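The transport-level change above lets ``channel`` be either a ready-made grpc.Channel or a callable that builds one; a callable is invoked with the same arguments as ``create_channel``. A minimal sketch, assuming DataScanServiceGrpcTransport is importable from the transports subpackage (the factory name make_channel is illustrative):

    from google.cloud.dataplex_v1.services.data_scan_service.transports import (
        DataScanServiceGrpcTransport,
    )

    def make_channel(host, **kwargs):
        # Called with the same arguments the transport would pass to
        # DataScanServiceGrpcTransport.create_channel (credentials, scopes,
        # quota_project_id, options, ...), so the default channel can be
        # customized or wrapped here before the transport uses it.
        return DataScanServiceGrpcTransport.create_channel(host, **kwargs)

    transport = DataScanServiceGrpcTransport(channel=make_channel)

Because the credentials-skipping branch now checks isinstance(channel, grpc.Channel), a callable does not suppress credential resolution: the transport resolves credentials first and then invokes the callable with them.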
client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -124,7 +127,7 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. credentials = False # If a channel was explicitly provided, set it. @@ -165,7 +168,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc_asyncio.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc_asyncio.py index c2be21505130..fa437f3e76ee 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc_asyncio.py @@ -16,7 +16,9 @@ from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union import warnings +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.cloud.location import locations_pb2 # type: ignore @@ -71,7 +73,6 @@ def create_channel( the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -101,7 +102,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -121,15 +122,18 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. 
+ channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -139,11 +143,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -170,7 +174,7 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. credentials = False # If a channel was explicitly provided, set it. @@ -210,7 +214,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, @@ -506,6 +512,56 @@ def generate_data_quality_rules( ) return self._stubs["generate_data_quality_rules"] + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_data_scan: gapic_v1.method_async.wrap_method( + self.create_data_scan, + default_timeout=None, + client_info=client_info, + ), + self.update_data_scan: gapic_v1.method_async.wrap_method( + self.update_data_scan, + default_timeout=None, + client_info=client_info, + ), + self.delete_data_scan: gapic_v1.method_async.wrap_method( + self.delete_data_scan, + default_timeout=None, + client_info=client_info, + ), + self.get_data_scan: gapic_v1.method_async.wrap_method( + self.get_data_scan, + default_timeout=None, + client_info=client_info, + ), + self.list_data_scans: gapic_v1.method_async.wrap_method( + self.list_data_scans, + default_timeout=None, + client_info=client_info, + ), + self.run_data_scan: gapic_v1.method_async.wrap_method( + self.run_data_scan, + default_timeout=None, + client_info=client_info, + ), + self.get_data_scan_job: gapic_v1.method_async.wrap_method( + self.get_data_scan_job, + default_timeout=None, + client_info=client_info, + ), + self.list_data_scan_jobs: gapic_v1.method_async.wrap_method( + self.list_data_scan_jobs, + default_timeout=None, + client_info=client_info, + ), + self.generate_data_quality_rules: gapic_v1.method_async.wrap_method( + self.generate_data_quality_rules, + 
default_timeout=None, + client_info=client_info, + ), + } + def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py index ef7e945d94f3..22f1ab335d24 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py @@ -17,6 +17,7 @@ import functools import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -222,7 +223,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, DataTaxonomyServiceTransport] = "grpc_asyncio", + transport: Optional[ + Union[ + str, + DataTaxonomyServiceTransport, + Callable[..., DataTaxonomyServiceTransport], + ] + ] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -234,9 +241,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.DataTaxonomyServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,DataTaxonomyServiceTransport,Callable[..., DataTaxonomyServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DataTaxonomyServiceTransport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -373,8 +382,8 @@ async def sample_create_data_taxonomy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, data_taxonomy, data_taxonomy_id]) if request is not None and has_flattened_params: raise ValueError( @@ -382,7 +391,10 @@ async def sample_create_data_taxonomy(): "the individual field arguments should be set." ) - request = gcd_data_taxonomy.CreateDataTaxonomyRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcd_data_taxonomy.CreateDataTaxonomyRequest): + request = gcd_data_taxonomy.CreateDataTaxonomyRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -395,11 +407,9 @@ async def sample_create_data_taxonomy(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_data_taxonomy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_data_taxonomy + ] # Certain fields should be provided within the metadata header; # add these here. 
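The async transports above now precompute their retry/timeout wrappers once in _prep_wrapped_messages, and the async client methods fetch them by keying _wrapped_methods with the bound stub accessor. A condensed, self-contained sketch of that pattern (simplified names, not the generated code):

    from google.api_core import gapic_v1

    class SketchTransport:
        def __init__(self, client_info=gapic_v1.client_info.ClientInfo()):
            self._prep_wrapped_messages(client_info)

        async def get_data_scan(self, request, **kwargs):
            ...  # the raw gRPC stub call would go here

        def _prep_wrapped_messages(self, client_info):
            # Wrap each RPC once at construction time instead of on every call.
            self._wrapped_methods = {
                self.get_data_scan: gapic_v1.method_async.wrap_method(
                    self.get_data_scan,
                    default_timeout=None,
                    client_info=client_info,
                ),
            }

    transport = SketchTransport()
    # Bound methods of the same instance compare and hash equal, so this lookup
    # returns the precomputed, timeout-aware wrapper rather than re-wrapping.
    rpc = transport._wrapped_methods[transport.get_data_scan]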
@@ -504,8 +514,8 @@ async def sample_update_data_taxonomy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([data_taxonomy, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -513,7 +523,10 @@ async def sample_update_data_taxonomy(): "the individual field arguments should be set." ) - request = gcd_data_taxonomy.UpdateDataTaxonomyRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcd_data_taxonomy.UpdateDataTaxonomyRequest): + request = gcd_data_taxonomy.UpdateDataTaxonomyRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -524,11 +537,9 @@ async def sample_update_data_taxonomy(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_data_taxonomy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_data_taxonomy + ] # Certain fields should be provided within the metadata header; # add these here. @@ -636,8 +647,8 @@ async def sample_delete_data_taxonomy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -645,7 +656,10 @@ async def sample_delete_data_taxonomy(): "the individual field arguments should be set." ) - request = data_taxonomy.DeleteDataTaxonomyRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.DeleteDataTaxonomyRequest): + request = data_taxonomy.DeleteDataTaxonomyRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -654,11 +668,9 @@ async def sample_delete_data_taxonomy(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_data_taxonomy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_data_taxonomy + ] # Certain fields should be provided within the metadata header; # add these here. @@ -755,8 +767,8 @@ async def sample_list_data_taxonomies(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -764,7 +776,10 @@ async def sample_list_data_taxonomies(): "the individual field arguments should be set." ) - request = data_taxonomy.ListDataTaxonomiesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.ListDataTaxonomiesRequest): + request = data_taxonomy.ListDataTaxonomiesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -773,11 +788,9 @@ async def sample_list_data_taxonomies(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_data_taxonomies, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_data_taxonomies + ] # Certain fields should be provided within the metadata header; # add these here. @@ -872,8 +885,8 @@ async def sample_get_data_taxonomy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -881,7 +894,10 @@ async def sample_get_data_taxonomy(): "the individual field arguments should be set." ) - request = data_taxonomy.GetDataTaxonomyRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.GetDataTaxonomyRequest): + request = data_taxonomy.GetDataTaxonomyRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -890,11 +906,9 @@ async def sample_get_data_taxonomy(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_data_taxonomy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_data_taxonomy + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1011,8 +1025,8 @@ async def sample_create_data_attribute_binding(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [parent, data_attribute_binding, data_attribute_binding_id] ) @@ -1022,7 +1036,10 @@ async def sample_create_data_attribute_binding(): "the individual field arguments should be set." ) - request = data_taxonomy.CreateDataAttributeBindingRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, data_taxonomy.CreateDataAttributeBindingRequest): + request = data_taxonomy.CreateDataAttributeBindingRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1035,11 +1052,9 @@ async def sample_create_data_attribute_binding(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_data_attribute_binding, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_data_attribute_binding + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1146,8 +1161,8 @@ async def sample_update_data_attribute_binding(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([data_attribute_binding, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -1155,7 +1170,10 @@ async def sample_update_data_attribute_binding(): "the individual field arguments should be set." ) - request = data_taxonomy.UpdateDataAttributeBindingRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.UpdateDataAttributeBindingRequest): + request = data_taxonomy.UpdateDataAttributeBindingRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1166,11 +1184,9 @@ async def sample_update_data_attribute_binding(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_data_attribute_binding, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_data_attribute_binding + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1281,8 +1297,8 @@ async def sample_delete_data_attribute_binding(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1290,7 +1306,10 @@ async def sample_delete_data_attribute_binding(): "the individual field arguments should be set." ) - request = data_taxonomy.DeleteDataAttributeBindingRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.DeleteDataAttributeBindingRequest): + request = data_taxonomy.DeleteDataAttributeBindingRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1299,11 +1318,9 @@ async def sample_delete_data_attribute_binding(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_data_attribute_binding, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_data_attribute_binding + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1400,8 +1417,8 @@ async def sample_list_data_attribute_bindings(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -1409,7 +1426,10 @@ async def sample_list_data_attribute_bindings(): "the individual field arguments should be set." ) - request = data_taxonomy.ListDataAttributeBindingsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.ListDataAttributeBindingsRequest): + request = data_taxonomy.ListDataAttributeBindingsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1418,11 +1438,9 @@ async def sample_list_data_attribute_bindings(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_data_attribute_bindings, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_data_attribute_bindings + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1517,8 +1535,8 @@ async def sample_get_data_attribute_binding(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1526,7 +1544,10 @@ async def sample_get_data_attribute_binding(): "the individual field arguments should be set." ) - request = data_taxonomy.GetDataAttributeBindingRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.GetDataAttributeBindingRequest): + request = data_taxonomy.GetDataAttributeBindingRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1535,11 +1556,9 @@ async def sample_get_data_attribute_binding(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_data_attribute_binding, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_data_attribute_binding + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1652,8 +1671,8 @@ async def sample_create_data_attribute(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, data_attribute, data_attribute_id]) if request is not None and has_flattened_params: raise ValueError( @@ -1661,7 +1680,10 @@ async def sample_create_data_attribute(): "the individual field arguments should be set." ) - request = data_taxonomy.CreateDataAttributeRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.CreateDataAttributeRequest): + request = data_taxonomy.CreateDataAttributeRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1674,11 +1696,9 @@ async def sample_create_data_attribute(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_data_attribute, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_data_attribute + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1783,8 +1803,8 @@ async def sample_update_data_attribute(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([data_attribute, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -1792,7 +1812,10 @@ async def sample_update_data_attribute(): "the individual field arguments should be set." ) - request = data_taxonomy.UpdateDataAttributeRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.UpdateDataAttributeRequest): + request = data_taxonomy.UpdateDataAttributeRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1803,11 +1826,9 @@ async def sample_update_data_attribute(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_data_attribute, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_data_attribute + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1913,8 +1934,8 @@ async def sample_delete_data_attribute(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1922,7 +1943,10 @@ async def sample_delete_data_attribute(): "the individual field arguments should be set." 
) - request = data_taxonomy.DeleteDataAttributeRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.DeleteDataAttributeRequest): + request = data_taxonomy.DeleteDataAttributeRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1931,11 +1955,9 @@ async def sample_delete_data_attribute(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_data_attribute, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_data_attribute + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2029,8 +2051,8 @@ async def sample_list_data_attributes(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -2038,7 +2060,10 @@ async def sample_list_data_attributes(): "the individual field arguments should be set." ) - request = data_taxonomy.ListDataAttributesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.ListDataAttributesRequest): + request = data_taxonomy.ListDataAttributesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2047,11 +2072,9 @@ async def sample_list_data_attributes(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_data_attributes, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_data_attributes + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2146,8 +2169,8 @@ async def sample_get_data_attribute(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -2155,7 +2178,10 @@ async def sample_get_data_attribute(): "the individual field arguments should be set." ) - request = data_taxonomy.GetDataAttributeRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.GetDataAttributeRequest): + request = data_taxonomy.GetDataAttributeRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2164,11 +2190,9 @@ async def sample_get_data_attribute(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_data_attribute, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_data_attribute + ] # Certain fields should be provided within the metadata header; # add these here. diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py index 35eb06a89c5d..4715bf38f2a8 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py @@ -17,6 +17,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -588,7 +589,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DataTaxonomyServiceTransport]] = None, + transport: Optional[ + Union[ + str, + DataTaxonomyServiceTransport, + Callable[..., DataTaxonomyServiceTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -600,9 +607,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, DataTaxonomyServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,DataTaxonomyServiceTransport,Callable[..., DataTaxonomyServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DataTaxonomyServiceTransport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -711,8 +720,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[DataTaxonomyServiceTransport], + Callable[..., DataTaxonomyServiceTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., DataTaxonomyServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -817,8 +834,8 @@ def sample_create_data_taxonomy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, data_taxonomy, data_taxonomy_id]) if request is not None and has_flattened_params: raise ValueError( @@ -826,10 +843,8 @@ def sample_create_data_taxonomy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a gcd_data_taxonomy.CreateDataTaxonomyRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, gcd_data_taxonomy.CreateDataTaxonomyRequest): request = gcd_data_taxonomy.CreateDataTaxonomyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -948,8 +963,8 @@ def sample_update_data_taxonomy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([data_taxonomy, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -957,10 +972,8 @@ def sample_update_data_taxonomy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a gcd_data_taxonomy.UpdateDataTaxonomyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, gcd_data_taxonomy.UpdateDataTaxonomyRequest): request = gcd_data_taxonomy.UpdateDataTaxonomyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1080,8 +1093,8 @@ def sample_delete_data_taxonomy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1089,10 +1102,8 @@ def sample_delete_data_taxonomy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a data_taxonomy.DeleteDataTaxonomyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, data_taxonomy.DeleteDataTaxonomyRequest): request = data_taxonomy.DeleteDataTaxonomyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1199,8 +1210,8 @@ def sample_list_data_taxonomies(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -1208,10 +1219,8 @@ def sample_list_data_taxonomies(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a data_taxonomy.ListDataTaxonomiesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, data_taxonomy.ListDataTaxonomiesRequest): request = data_taxonomy.ListDataTaxonomiesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1316,8 +1325,8 @@ def sample_get_data_taxonomy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1325,10 +1334,8 @@ def sample_get_data_taxonomy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a data_taxonomy.GetDataTaxonomyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, data_taxonomy.GetDataTaxonomyRequest): request = data_taxonomy.GetDataTaxonomyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1455,8 +1462,8 @@ def sample_create_data_attribute_binding(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [parent, data_attribute_binding, data_attribute_binding_id] ) @@ -1466,10 +1473,8 @@ def sample_create_data_attribute_binding(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a data_taxonomy.CreateDataAttributeBindingRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, data_taxonomy.CreateDataAttributeBindingRequest): request = data_taxonomy.CreateDataAttributeBindingRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1592,8 +1597,8 @@ def sample_update_data_attribute_binding(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([data_attribute_binding, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -1601,10 +1606,8 @@ def sample_update_data_attribute_binding(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a data_taxonomy.UpdateDataAttributeBindingRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, data_taxonomy.UpdateDataAttributeBindingRequest): request = data_taxonomy.UpdateDataAttributeBindingRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1729,8 +1732,8 @@ def sample_delete_data_attribute_binding(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1738,10 +1741,8 @@ def sample_delete_data_attribute_binding(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a data_taxonomy.DeleteDataAttributeBindingRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, data_taxonomy.DeleteDataAttributeBindingRequest): request = data_taxonomy.DeleteDataAttributeBindingRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1850,8 +1851,8 @@ def sample_list_data_attribute_bindings(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -1859,10 +1860,8 @@ def sample_list_data_attribute_bindings(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a data_taxonomy.ListDataAttributeBindingsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, data_taxonomy.ListDataAttributeBindingsRequest): request = data_taxonomy.ListDataAttributeBindingsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1969,8 +1968,8 @@ def sample_get_data_attribute_binding(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1978,10 +1977,8 @@ def sample_get_data_attribute_binding(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a data_taxonomy.GetDataAttributeBindingRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, data_taxonomy.GetDataAttributeBindingRequest): request = data_taxonomy.GetDataAttributeBindingRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2106,8 +2103,8 @@ def sample_create_data_attribute(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, data_attribute, data_attribute_id]) if request is not None and has_flattened_params: raise ValueError( @@ -2115,10 +2112,8 @@ def sample_create_data_attribute(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a data_taxonomy.CreateDataAttributeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, data_taxonomy.CreateDataAttributeRequest): request = data_taxonomy.CreateDataAttributeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2237,8 +2232,8 @@ def sample_update_data_attribute(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([data_attribute, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -2246,10 +2241,8 @@ def sample_update_data_attribute(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a data_taxonomy.UpdateDataAttributeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, data_taxonomy.UpdateDataAttributeRequest): request = data_taxonomy.UpdateDataAttributeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2367,8 +2360,8 @@ def sample_delete_data_attribute(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -2376,10 +2369,8 @@ def sample_delete_data_attribute(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a data_taxonomy.DeleteDataAttributeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, data_taxonomy.DeleteDataAttributeRequest): request = data_taxonomy.DeleteDataAttributeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2483,8 +2474,8 @@ def sample_list_data_attributes(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -2492,10 +2483,8 @@ def sample_list_data_attributes(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a data_taxonomy.ListDataAttributesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, data_taxonomy.ListDataAttributesRequest): request = data_taxonomy.ListDataAttributesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2600,8 +2589,8 @@ def sample_get_data_attribute(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -2609,10 +2598,8 @@ def sample_get_data_attribute(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a data_taxonomy.GetDataAttributeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, data_taxonomy.GetDataAttributeRequest): request = data_taxonomy.GetDataAttributeRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc.py index be22402fe016..a7d586c921ec 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc.py @@ -56,7 +56,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -76,14 +76,17 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. 
- This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -93,11 +96,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -124,7 +127,7 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. credentials = False # If a channel was explicitly provided, set it. 
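Editor's note: a minimal sketch of the new ``channel`` behavior documented above, under the assumption that the generated transports package exports DataTaxonomyServiceGrpcTransport and using anonymous credentials purely for illustration. When a callable is passed instead of a channel instance, the transport invokes it with the same arguments it would pass to ``create_channel`` (the target host first, then auth-related keyword arguments).

    import grpc
    from google.auth import credentials as ga_credentials

    # Assumed export path for the generated gRPC transport.
    from google.cloud.dataplex_v1.services.data_taxonomy_service.transports import (
        DataTaxonomyServiceGrpcTransport,
    )

    def channel_factory(host, **kwargs):
        # Called with the same arguments as ``create_channel``; this toy
        # factory ignores the auth-related kwargs and opens a plain channel.
        return grpc.insecure_channel(host)

    transport = DataTaxonomyServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        channel=channel_factory,
    )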
@@ -165,7 +168,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc_asyncio.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc_asyncio.py index e7209a25d37c..03ba6779ba85 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc_asyncio.py @@ -16,7 +16,9 @@ from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union import warnings +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.cloud.location import locations_pb2 # type: ignore @@ -71,7 +73,6 @@ def create_channel( the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -101,7 +102,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -121,15 +122,18 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -139,11 +143,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -170,7 +174,7 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. credentials = False # If a channel was explicitly provided, set it. @@ -210,7 +214,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, @@ -696,6 +702,86 @@ def get_data_attribute( ) return self._stubs["get_data_attribute"] + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_data_taxonomy: gapic_v1.method_async.wrap_method( + self.create_data_taxonomy, + default_timeout=None, + client_info=client_info, + ), + self.update_data_taxonomy: gapic_v1.method_async.wrap_method( + self.update_data_taxonomy, + default_timeout=None, + client_info=client_info, + ), + self.delete_data_taxonomy: gapic_v1.method_async.wrap_method( + self.delete_data_taxonomy, + default_timeout=None, + client_info=client_info, + ), + self.list_data_taxonomies: gapic_v1.method_async.wrap_method( + self.list_data_taxonomies, + default_timeout=None, + client_info=client_info, + ), + self.get_data_taxonomy: gapic_v1.method_async.wrap_method( + self.get_data_taxonomy, + default_timeout=None, + client_info=client_info, + ), + self.create_data_attribute_binding: gapic_v1.method_async.wrap_method( + self.create_data_attribute_binding, + default_timeout=None, + client_info=client_info, + ), + self.update_data_attribute_binding: gapic_v1.method_async.wrap_method( + self.update_data_attribute_binding, + default_timeout=None, + client_info=client_info, + ), + self.delete_data_attribute_binding: gapic_v1.method_async.wrap_method( + self.delete_data_attribute_binding, + default_timeout=None, + client_info=client_info, + ), + self.list_data_attribute_bindings: gapic_v1.method_async.wrap_method( + self.list_data_attribute_bindings, + default_timeout=None, + client_info=client_info, + ), + self.get_data_attribute_binding: gapic_v1.method_async.wrap_method( + self.get_data_attribute_binding, + default_timeout=None, + client_info=client_info, + ), + self.create_data_attribute: gapic_v1.method_async.wrap_method( + self.create_data_attribute, + 
default_timeout=None, + client_info=client_info, + ), + self.update_data_attribute: gapic_v1.method_async.wrap_method( + self.update_data_attribute, + default_timeout=None, + client_info=client_info, + ), + self.delete_data_attribute: gapic_v1.method_async.wrap_method( + self.delete_data_attribute, + default_timeout=None, + client_info=client_info, + ), + self.list_data_attributes: gapic_v1.method_async.wrap_method( + self.list_data_attributes, + default_timeout=None, + client_info=client_info, + ), + self.get_data_attribute: gapic_v1.method_async.wrap_method( + self.get_data_attribute, + default_timeout=None, + client_info=client_info, + ), + } + def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/async_client.py index bdae55a2e661..897abb0d778b 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/async_client.py @@ -17,6 +17,7 @@ import functools import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -224,7 +225,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, DataplexServiceTransport] = "grpc_asyncio", + transport: Optional[ + Union[ + str, DataplexServiceTransport, Callable[..., DataplexServiceTransport] + ] + ] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -236,9 +241,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.DataplexServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,DataplexServiceTransport,Callable[..., DataplexServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DataplexServiceTransport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -383,8 +390,8 @@ async def sample_create_lake(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, lake, lake_id]) if request is not None and has_flattened_params: raise ValueError( @@ -392,7 +399,10 @@ async def sample_create_lake(): "the individual field arguments should be set." ) - request = service.CreateLakeRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateLakeRequest): + request = service.CreateLakeRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
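Editor's note: the ``transport`` docstring above also accepts a callable; a minimal sketch follows, assuming the package-level DataplexServiceAsyncClient export and the generated transports package, with anonymous credentials used only to make the example self-contained. The callable receives the same keyword arguments the client would pass to the transport constructor.

    from google.auth import credentials as ga_credentials
    from google.cloud import dataplex_v1

    # Assumed export path for the generated asyncio transport.
    from google.cloud.dataplex_v1.services.dataplex_service.transports import (
        DataplexServiceGrpcAsyncIOTransport,
    )

    def transport_factory(**kwargs):
        # Receives credentials, host, scopes, client_info, etc.; inspect or
        # adjust them here before constructing the real transport.
        return DataplexServiceGrpcAsyncIOTransport(**kwargs)

    client = dataplex_v1.DataplexServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport_factory,
    )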
@@ -405,11 +415,9 @@ async def sample_create_lake(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_lake, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_lake + ] # Certain fields should be provided within the metadata header; # add these here. @@ -520,8 +528,8 @@ async def sample_update_lake(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([lake, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -529,7 +537,10 @@ async def sample_update_lake(): "the individual field arguments should be set." ) - request = service.UpdateLakeRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateLakeRequest): + request = service.UpdateLakeRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -540,11 +551,9 @@ async def sample_update_lake(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_lake, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_lake + ] # Certain fields should be provided within the metadata header; # add these here. @@ -651,8 +660,8 @@ async def sample_delete_lake(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -660,7 +669,10 @@ async def sample_delete_lake(): "the individual field arguments should be set." ) - request = service.DeleteLakeRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteLakeRequest): + request = service.DeleteLakeRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -669,11 +681,9 @@ async def sample_delete_lake(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_lake, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_lake + ] # Certain fields should be provided within the metadata header; # add these here. @@ -769,8 +779,8 @@ async def sample_list_lakes(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -778,7 +788,10 @@ async def sample_list_lakes(): "the individual field arguments should be set." ) - request = service.ListLakesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListLakesRequest): + request = service.ListLakesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -787,20 +800,9 @@ async def sample_list_lakes(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_lakes, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_lakes + ] # Certain fields should be provided within the metadata header; # add these here. @@ -905,8 +907,8 @@ async def sample_get_lake(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -914,7 +916,10 @@ async def sample_get_lake(): "the individual field arguments should be set." ) - request = service.GetLakeRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetLakeRequest): + request = service.GetLakeRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -923,20 +928,7 @@ async def sample_get_lake(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_lake, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.get_lake] # Certain fields should be provided within the metadata header; # add these here. @@ -1022,8 +1014,8 @@ async def sample_list_lake_actions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -1031,7 +1023,10 @@ async def sample_list_lake_actions(): "the individual field arguments should be set." 
) - request = service.ListLakeActionsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListLakeActionsRequest): + request = service.ListLakeActionsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1040,20 +1035,9 @@ async def sample_list_lake_actions(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_lake_actions, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_lake_actions + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1184,8 +1168,8 @@ async def sample_create_zone(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, zone, zone_id]) if request is not None and has_flattened_params: raise ValueError( @@ -1193,7 +1177,10 @@ async def sample_create_zone(): "the individual field arguments should be set." ) - request = service.CreateZoneRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateZoneRequest): + request = service.CreateZoneRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1206,11 +1193,9 @@ async def sample_create_zone(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_zone, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_zone + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1319,8 +1304,8 @@ async def sample_update_zone(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([zone, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -1328,7 +1313,10 @@ async def sample_update_zone(): "the individual field arguments should be set." ) - request = service.UpdateZoneRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateZoneRequest): + request = service.UpdateZoneRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
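Editor's note: a short sketch of the request-coercion behavior the repeated "Use the request object if provided ... or create one" comments describe; the resource name below is made up for illustration.

    from google.cloud import dataplex_v1

    name = "projects/my-project/locations/us-central1/lakes/my-lake"

    # An already-constructed request object is used as-is (no copy is made),
    # while a dict or flattened keyword arguments are coerced into the
    # protobuf request type before the RPC is sent.
    request = dataplex_v1.GetLakeRequest(name=name)

    # Equivalent calls (inside an async context):
    #     await client.get_lake(request=request)
    #     await client.get_lake(request={"name": name})
    #     await client.get_lake(name=name)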
@@ -1339,11 +1327,9 @@ async def sample_update_zone(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_zone, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_zone + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1450,8 +1436,8 @@ async def sample_delete_zone(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1459,7 +1445,10 @@ async def sample_delete_zone(): "the individual field arguments should be set." ) - request = service.DeleteZoneRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteZoneRequest): + request = service.DeleteZoneRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1468,11 +1457,9 @@ async def sample_delete_zone(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_zone, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_zone + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1566,8 +1553,8 @@ async def sample_list_zones(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -1575,7 +1562,10 @@ async def sample_list_zones(): "the individual field arguments should be set." ) - request = service.ListZonesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListZonesRequest): + request = service.ListZonesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1584,20 +1574,9 @@ async def sample_list_zones(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_zones, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_zones + ] # Certain fields should be provided within the metadata header; # add these here. 
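Editor's note: the inline ``default_retry``/``default_timeout`` blocks removed in these hunks now live in the transports' precomputed defaults; callers needing different behavior can still override per call. A sketch using the same policy values the removed code configured (the ``get_zone`` call is illustrative):

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry_async as retries

    # Same policy as the removed inline defaults: retry on ServiceUnavailable
    # with exponential backoff, capped at a 60-second overall deadline.
    retry = retries.AsyncRetry(
        initial=1.0,
        maximum=10.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
        deadline=60.0,
    )

    # Per-call override (inside an async context):
    #     zone = await client.get_zone(name=zone_name, retry=retry, timeout=60.0)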
@@ -1694,8 +1673,8 @@ async def sample_get_zone(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1703,7 +1682,10 @@ async def sample_get_zone(): "the individual field arguments should be set." ) - request = service.GetZoneRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetZoneRequest): + request = service.GetZoneRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1712,20 +1694,7 @@ async def sample_get_zone(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_zone, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.get_zone] # Certain fields should be provided within the metadata header; # add these here. @@ -1811,8 +1780,8 @@ async def sample_list_zone_actions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -1820,7 +1789,10 @@ async def sample_list_zone_actions(): "the individual field arguments should be set." ) - request = service.ListZoneActionsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListZoneActionsRequest): + request = service.ListZoneActionsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1829,20 +1801,9 @@ async def sample_list_zone_actions(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_zone_actions, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_zone_actions + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1966,8 +1927,8 @@ async def sample_create_asset(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([parent, asset, asset_id]) if request is not None and has_flattened_params: raise ValueError( @@ -1975,7 +1936,10 @@ async def sample_create_asset(): "the individual field arguments should be set." ) - request = service.CreateAssetRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateAssetRequest): + request = service.CreateAssetRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1988,11 +1952,9 @@ async def sample_create_asset(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_asset, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_asset + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2097,8 +2059,8 @@ async def sample_update_asset(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([asset, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -2106,7 +2068,10 @@ async def sample_update_asset(): "the individual field arguments should be set." ) - request = service.UpdateAssetRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateAssetRequest): + request = service.UpdateAssetRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2117,11 +2082,9 @@ async def sample_update_asset(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_asset, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_asset + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2229,8 +2192,8 @@ async def sample_delete_asset(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -2238,7 +2201,10 @@ async def sample_delete_asset(): "the individual field arguments should be set." ) - request = service.DeleteAssetRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteAssetRequest): + request = service.DeleteAssetRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
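Editor's note: the ``_wrapped_methods`` lookups replacing the inline ``wrap_method`` calls depend on the ``_prep_wrapped_messages`` override added to the async transports earlier in this diff. A simplified, self-contained sketch of that pattern, with illustrative (not generated) names:

    from google.api_core import gapic_v1

    class ExampleAsyncTransport:
        """Illustrative stand-in for a generated gRPC asyncio transport."""

        def __init__(self, client_info=gapic_v1.client_info.ClientInfo()):
            self._prep_wrapped_messages(client_info)

        async def example_rpc(self, request, **kwargs):
            return request  # a real transport would invoke the gRPC stub here

        def _prep_wrapped_messages(self, client_info):
            # Precompute retry/timeout wrappers once, keyed by the RPC callable,
            # instead of wrapping the method on every client call.
            self._wrapped_methods = {
                self.example_rpc: gapic_v1.method_async.wrap_method(
                    self.example_rpc,
                    default_timeout=None,
                    client_info=client_info,
                ),
            }

    # A client method then looks the wrapper up rather than re-wrapping:
    #     rpc = self._client._transport._wrapped_methods[
    #         self._client._transport.example_rpc
    #     ]
    #     response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata)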
@@ -2247,11 +2213,9 @@ async def sample_delete_asset(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_asset, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_asset + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2345,8 +2309,8 @@ async def sample_list_assets(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -2354,7 +2318,10 @@ async def sample_list_assets(): "the individual field arguments should be set." ) - request = service.ListAssetsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListAssetsRequest): + request = service.ListAssetsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2363,20 +2330,9 @@ async def sample_list_assets(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_assets, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_assets + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2468,8 +2424,8 @@ async def sample_get_asset(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -2477,7 +2433,10 @@ async def sample_get_asset(): "the individual field arguments should be set." ) - request = service.GetAssetRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetAssetRequest): + request = service.GetAssetRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2486,20 +2445,9 @@ async def sample_get_asset(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_asset, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_asset + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2585,8 +2533,8 @@ async def sample_list_asset_actions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -2594,7 +2542,10 @@ async def sample_list_asset_actions(): "the individual field arguments should be set." ) - request = service.ListAssetActionsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListAssetActionsRequest): + request = service.ListAssetActionsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2603,20 +2554,9 @@ async def sample_list_asset_actions(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_asset_actions, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_asset_actions + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2734,8 +2674,8 @@ async def sample_create_task(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, task, task_id]) if request is not None and has_flattened_params: raise ValueError( @@ -2743,7 +2683,10 @@ async def sample_create_task(): "the individual field arguments should be set." ) - request = service.CreateTaskRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateTaskRequest): + request = service.CreateTaskRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2756,11 +2699,9 @@ async def sample_create_task(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_task, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_task + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2869,8 +2810,8 @@ async def sample_update_task(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([task, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -2878,7 +2819,10 @@ async def sample_update_task(): "the individual field arguments should be set." ) - request = service.UpdateTaskRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateTaskRequest): + request = service.UpdateTaskRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2889,11 +2833,9 @@ async def sample_update_task(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_task, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_task + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2999,8 +2941,8 @@ async def sample_delete_task(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -3008,7 +2950,10 @@ async def sample_delete_task(): "the individual field arguments should be set." ) - request = service.DeleteTaskRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteTaskRequest): + request = service.DeleteTaskRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -3017,11 +2962,9 @@ async def sample_delete_task(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_task, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_task + ] # Certain fields should be provided within the metadata header; # add these here. @@ -3115,8 +3058,8 @@ async def sample_list_tasks(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
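Throughout these hunks the per-call gapic_v1.method_async.wrap_method(...) blocks are dropped in favor of a single lookup into self._client._transport._wrapped_methods, so the retry and timeout defaults that used to be repeated at every call site now live in one registry on the transport. Below is a rough, illustrative sketch of how such a registry could be prebuilt; the helper name build_wrapped_methods is assumed for illustration and is not the generated code's actual hook, but the retry and timeout values mirror the removed lines above.

from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry_async as retries


def build_wrapped_methods(transport, client_info=None):
    """Map each bound RPC to a wrapped form carrying its retry/timeout defaults."""
    client_info = client_info or gapic_v1.client_info.ClientInfo()
    default_retry = retries.AsyncRetry(
        initial=1.0,
        maximum=10.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
        deadline=60.0,
    )
    return {
        transport.list_assets: gapic_v1.method_async.wrap_method(
            transport.list_assets,
            default_retry=default_retry,
            default_timeout=60.0,
            client_info=client_info,
        ),
        transport.get_asset: gapic_v1.method_async.wrap_method(
            transport.get_asset,
            default_retry=default_retry,
            default_timeout=60.0,
            client_info=client_info,
        ),
        # ...one entry per RPC; methods without a retry policy omit default_retry.
    }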
has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -3124,7 +3067,10 @@ async def sample_list_tasks(): "the individual field arguments should be set." ) - request = service.ListTasksRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListTasksRequest): + request = service.ListTasksRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -3133,20 +3079,9 @@ async def sample_list_tasks(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_tasks, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_tasks + ] # Certain fields should be provided within the metadata header; # add these here. @@ -3235,8 +3170,8 @@ async def sample_get_task(): A task represents a user-visible job. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -3244,7 +3179,10 @@ async def sample_get_task(): "the individual field arguments should be set." ) - request = service.GetTaskRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetTaskRequest): + request = service.GetTaskRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -3253,20 +3191,7 @@ async def sample_get_task(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_task, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.get_task] # Certain fields should be provided within the metadata header; # add these here. @@ -3352,8 +3277,8 @@ async def sample_list_jobs(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -3361,7 +3286,10 @@ async def sample_list_jobs(): "the individual field arguments should be set." 
) - request = service.ListJobsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListJobsRequest): + request = service.ListJobsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -3370,20 +3298,9 @@ async def sample_list_jobs(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_jobs, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_jobs + ] # Certain fields should be provided within the metadata header; # add these here. @@ -3472,8 +3389,8 @@ async def sample_run_task(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -3481,7 +3398,10 @@ async def sample_run_task(): "the individual field arguments should be set." ) - request = service.RunTaskRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.RunTaskRequest): + request = service.RunTaskRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -3490,11 +3410,7 @@ async def sample_run_task(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.run_task, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.run_task] # Certain fields should be provided within the metadata header; # add these here. @@ -3576,8 +3492,8 @@ async def sample_get_job(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -3585,7 +3501,10 @@ async def sample_get_job(): "the individual field arguments should be set." ) - request = service.GetJobRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetJobRequest): + request = service.GetJobRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -3594,20 +3513,7 @@ async def sample_get_job(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_job, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.get_job] # Certain fields should be provided within the metadata header; # add these here. @@ -3680,8 +3586,8 @@ async def sample_cancel_job(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -3689,7 +3595,10 @@ async def sample_cancel_job(): "the individual field arguments should be set." ) - request = service.CancelJobRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CancelJobRequest): + request = service.CancelJobRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -3698,11 +3607,9 @@ async def sample_cancel_job(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.cancel_job, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.cancel_job + ] # Certain fields should be provided within the metadata header; # add these here. @@ -3812,8 +3719,8 @@ async def sample_create_environment(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, environment, environment_id]) if request is not None and has_flattened_params: raise ValueError( @@ -3821,7 +3728,10 @@ async def sample_create_environment(): "the individual field arguments should be set." ) - request = service.CreateEnvironmentRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateEnvironmentRequest): + request = service.CreateEnvironmentRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -3834,11 +3744,9 @@ async def sample_create_environment(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_environment, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_environment + ] # Certain fields should be provided within the metadata header; # add these here. @@ -3943,8 +3851,8 @@ async def sample_update_environment(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([environment, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -3952,7 +3860,10 @@ async def sample_update_environment(): "the individual field arguments should be set." ) - request = service.UpdateEnvironmentRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateEnvironmentRequest): + request = service.UpdateEnvironmentRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -3963,11 +3874,9 @@ async def sample_update_environment(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_environment, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_environment + ] # Certain fields should be provided within the metadata header; # add these here. @@ -4075,8 +3984,8 @@ async def sample_delete_environment(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -4084,7 +3993,10 @@ async def sample_delete_environment(): "the individual field arguments should be set." ) - request = service.DeleteEnvironmentRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteEnvironmentRequest): + request = service.DeleteEnvironmentRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -4093,11 +4005,9 @@ async def sample_delete_environment(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_environment, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_environment + ] # Certain fields should be provided within the metadata header; # add these here. @@ -4191,8 +4101,8 @@ async def sample_list_environments(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -4200,7 +4110,10 @@ async def sample_list_environments(): "the individual field arguments should be set." 
) - request = service.ListEnvironmentsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListEnvironmentsRequest): + request = service.ListEnvironmentsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -4209,20 +4122,9 @@ async def sample_list_environments(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_environments, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_environments + ] # Certain fields should be provided within the metadata header; # add these here. @@ -4314,8 +4216,8 @@ async def sample_get_environment(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -4323,7 +4225,10 @@ async def sample_get_environment(): "the individual field arguments should be set." ) - request = service.GetEnvironmentRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetEnvironmentRequest): + request = service.GetEnvironmentRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -4332,20 +4237,9 @@ async def sample_get_environment(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_environment, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_environment + ] # Certain fields should be provided within the metadata header; # add these here. @@ -4431,8 +4325,8 @@ async def sample_list_sessions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -4440,7 +4334,10 @@ async def sample_list_sessions(): "the individual field arguments should be set." ) - request = service.ListSessionsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, service.ListSessionsRequest): + request = service.ListSessionsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -4449,11 +4346,9 @@ async def sample_list_sessions(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_sessions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_sessions + ] # Certain fields should be provided within the metadata header; # add these here. diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py index df2c317a9300..9377192e9538 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py @@ -17,6 +17,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -721,7 +722,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DataplexServiceTransport]] = None, + transport: Optional[ + Union[ + str, DataplexServiceTransport, Callable[..., DataplexServiceTransport] + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -733,9 +738,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, DataplexServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,DataplexServiceTransport,Callable[..., DataplexServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DataplexServiceTransport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -844,8 +851,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[DataplexServiceTransport], Callable[..., DataplexServiceTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., DataplexServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -958,8 +972,8 @@ def sample_create_lake(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
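The dataplex_service/client.py hunk above widens the transport argument so it can also be a Callable that builds the transport; when a Callable is passed, the client invokes it with the same initialization arguments it would otherwise hand to the DataplexServiceTransport constructor. A minimal usage sketch, assuming the generated DataplexServiceGrpcTransport is exported from the usual transports subpackage (the factory itself is a hypothetical example):

from google.cloud import dataplex_v1
from google.cloud.dataplex_v1.services.dataplex_service.transports import (
    DataplexServiceGrpcTransport,
)


def transport_factory(**kwargs):
    # Receives credentials, host, scopes, etc., the same keyword arguments the
    # client would pass to the DataplexServiceTransport constructor.
    return DataplexServiceGrpcTransport(**kwargs)


client = dataplex_v1.DataplexServiceClient(transport=transport_factory)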
has_flattened_params = any([parent, lake, lake_id]) if request is not None and has_flattened_params: raise ValueError( @@ -967,10 +981,8 @@ def sample_create_lake(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a service.CreateLakeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, service.CreateLakeRequest): request = service.CreateLakeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1095,8 +1107,8 @@ def sample_update_lake(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([lake, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -1104,10 +1116,8 @@ def sample_update_lake(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a service.UpdateLakeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, service.UpdateLakeRequest): request = service.UpdateLakeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1226,8 +1236,8 @@ def sample_delete_lake(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1235,10 +1245,8 @@ def sample_delete_lake(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a service.DeleteLakeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, service.DeleteLakeRequest): request = service.DeleteLakeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1344,8 +1352,8 @@ def sample_list_lakes(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -1353,10 +1361,8 @@ def sample_list_lakes(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a service.ListLakesRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, service.ListLakesRequest): request = service.ListLakesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1471,8 +1477,8 @@ def sample_get_lake(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1480,10 +1486,8 @@ def sample_get_lake(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a service.GetLakeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, service.GetLakeRequest): request = service.GetLakeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1579,8 +1583,8 @@ def sample_list_lake_actions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -1588,10 +1592,8 @@ def sample_list_lake_actions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a service.ListLakeActionsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, service.ListLakeActionsRequest): request = service.ListLakeActionsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1732,8 +1734,8 @@ def sample_create_zone(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, zone, zone_id]) if request is not None and has_flattened_params: raise ValueError( @@ -1741,10 +1743,8 @@ def sample_create_zone(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a service.CreateZoneRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, service.CreateZoneRequest): request = service.CreateZoneRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1867,8 +1867,8 @@ def sample_update_zone(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([zone, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -1876,10 +1876,8 @@ def sample_update_zone(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a service.UpdateZoneRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, service.UpdateZoneRequest): request = service.UpdateZoneRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1998,8 +1996,8 @@ def sample_delete_zone(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -2007,10 +2005,8 @@ def sample_delete_zone(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a service.DeleteZoneRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, service.DeleteZoneRequest): request = service.DeleteZoneRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2114,8 +2110,8 @@ def sample_list_zones(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -2123,10 +2119,8 @@ def sample_list_zones(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a service.ListZonesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, service.ListZonesRequest): request = service.ListZonesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2233,8 +2227,8 @@ def sample_get_zone(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -2242,10 +2236,8 @@ def sample_get_zone(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a service.GetZoneRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, service.GetZoneRequest): request = service.GetZoneRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2341,8 +2333,8 @@ def sample_list_zone_actions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -2350,10 +2342,8 @@ def sample_list_zone_actions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a service.ListZoneActionsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, service.ListZoneActionsRequest): request = service.ListZoneActionsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2487,8 +2477,8 @@ def sample_create_asset(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, asset, asset_id]) if request is not None and has_flattened_params: raise ValueError( @@ -2496,10 +2486,8 @@ def sample_create_asset(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a service.CreateAssetRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, service.CreateAssetRequest): request = service.CreateAssetRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2618,8 +2606,8 @@ def sample_update_asset(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([asset, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -2627,10 +2615,8 @@ def sample_update_asset(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a service.UpdateAssetRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, service.UpdateAssetRequest): request = service.UpdateAssetRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2750,8 +2736,8 @@ def sample_delete_asset(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -2759,10 +2745,8 @@ def sample_delete_asset(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a service.DeleteAssetRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, service.DeleteAssetRequest): request = service.DeleteAssetRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2866,8 +2850,8 @@ def sample_list_assets(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -2875,10 +2859,8 @@ def sample_list_assets(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a service.ListAssetsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, service.ListAssetsRequest): request = service.ListAssetsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2980,8 +2962,8 @@ def sample_get_asset(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -2989,10 +2971,8 @@ def sample_get_asset(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a service.GetAssetRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, service.GetAssetRequest): request = service.GetAssetRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3088,8 +3068,8 @@ def sample_list_asset_actions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -3097,10 +3077,8 @@ def sample_list_asset_actions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a service.ListAssetActionsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, service.ListAssetActionsRequest): request = service.ListAssetActionsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3228,8 +3206,8 @@ def sample_create_task(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, task, task_id]) if request is not None and has_flattened_params: raise ValueError( @@ -3237,10 +3215,8 @@ def sample_create_task(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a service.CreateTaskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, service.CreateTaskRequest): request = service.CreateTaskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3363,8 +3339,8 @@ def sample_update_task(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([task, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -3372,10 +3348,8 @@ def sample_update_task(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a service.UpdateTaskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, service.UpdateTaskRequest): request = service.UpdateTaskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3493,8 +3467,8 @@ def sample_delete_task(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -3502,10 +3476,8 @@ def sample_delete_task(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a service.DeleteTaskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, service.DeleteTaskRequest): request = service.DeleteTaskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3609,8 +3581,8 @@ def sample_list_tasks(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -3618,10 +3590,8 @@ def sample_list_tasks(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a service.ListTasksRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, service.ListTasksRequest): request = service.ListTasksRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3720,8 +3690,8 @@ def sample_get_task(): A task represents a user-visible job. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -3729,10 +3699,8 @@ def sample_get_task(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a service.GetTaskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, service.GetTaskRequest): request = service.GetTaskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3828,8 +3796,8 @@ def sample_list_jobs(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -3837,10 +3805,8 @@ def sample_list_jobs(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a service.ListJobsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, service.ListJobsRequest): request = service.ListJobsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3939,8 +3905,8 @@ def sample_run_task(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -3948,10 +3914,8 @@ def sample_run_task(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a service.RunTaskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, service.RunTaskRequest): request = service.RunTaskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4043,8 +4007,8 @@ def sample_get_job(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -4052,10 +4016,8 @@ def sample_get_job(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a service.GetJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, service.GetJobRequest): request = service.GetJobRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4138,8 +4100,8 @@ def sample_cancel_job(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -4147,10 +4109,8 @@ def sample_cancel_job(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a service.CancelJobRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, service.CancelJobRequest): request = service.CancelJobRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4270,8 +4230,8 @@ def sample_create_environment(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, environment, environment_id]) if request is not None and has_flattened_params: raise ValueError( @@ -4279,10 +4239,8 @@ def sample_create_environment(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a service.CreateEnvironmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, service.CreateEnvironmentRequest): request = service.CreateEnvironmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4401,8 +4359,8 @@ def sample_update_environment(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([environment, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -4410,10 +4368,8 @@ def sample_update_environment(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a service.UpdateEnvironmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, service.UpdateEnvironmentRequest): request = service.UpdateEnvironmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4533,8 +4489,8 @@ def sample_delete_environment(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -4542,10 +4498,8 @@ def sample_delete_environment(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a service.DeleteEnvironmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, service.DeleteEnvironmentRequest): request = service.DeleteEnvironmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4649,8 +4603,8 @@ def sample_list_environments(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -4658,10 +4612,8 @@ def sample_list_environments(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a service.ListEnvironmentsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, service.ListEnvironmentsRequest): request = service.ListEnvironmentsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4763,8 +4715,8 @@ def sample_get_environment(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -4772,10 +4724,8 @@ def sample_get_environment(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a service.GetEnvironmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, service.GetEnvironmentRequest): request = service.GetEnvironmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4871,8 +4821,8 @@ def sample_list_sessions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -4880,10 +4830,8 @@ def sample_list_sessions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a service.ListSessionsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
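The repeated client.py hunks above shorten the comment but keep the same coercion: an incoming request object is used as-is, anything else (for example a dict) is converted into the request type, and combining a request object with flattened field arguments still raises ValueError. A small usage sketch against the synchronous client; the resource names are placeholders:

from google.cloud import dataplex_v1

client = dataplex_v1.DataplexServiceClient()
parent = "projects/my-project/locations/us-central1/lakes/my-lake"

# A prebuilt request object is passed through unchanged...
request = dataplex_v1.ListEnvironmentsRequest(parent=parent)
for environment in client.list_environments(request=request):
    print(environment.name)

# ...while a dict is coerced into a ListEnvironmentsRequest...
for environment in client.list_environments(request={"parent": parent}):
    print(environment.name)

# ...and mixing a request object with flattened fields raises ValueError:
# client.list_environments(request=request, parent=parent)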
if not isinstance(request, service.ListSessionsRequest): request = service.ListSessionsRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc.py index 9951115969ea..8c9901919c5d 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc.py @@ -59,7 +59,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -79,14 +79,17 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -96,11 +99,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -127,7 +130,7 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. 
credentials = False # If a channel was explicitly provided, set it. @@ -168,7 +171,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc_asyncio.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc_asyncio.py index 7e5b39d7b437..562d3c751876 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc_asyncio.py @@ -16,7 +16,9 @@ from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union import warnings +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.cloud.location import locations_pb2 # type: ignore @@ -74,7 +76,6 @@ def create_channel( the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -104,7 +105,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -124,15 +125,18 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. 
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -142,11 +146,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -173,7 +177,7 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. credentials = False # If a channel was explicitly provided, set it. @@ -213,7 +217,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, @@ -1130,6 +1136,311 @@ def list_sessions( ) return self._stubs["list_sessions"] + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_lake: gapic_v1.method_async.wrap_method( + self.create_lake, + default_timeout=60.0, + client_info=client_info, + ), + self.update_lake: gapic_v1.method_async.wrap_method( + self.update_lake, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_lake: gapic_v1.method_async.wrap_method( + self.delete_lake, + default_timeout=60.0, + client_info=client_info, + ), + self.list_lakes: gapic_v1.method_async.wrap_method( + self.list_lakes, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_lake: gapic_v1.method_async.wrap_method( + self.get_lake, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_lake_actions: gapic_v1.method_async.wrap_method( + self.list_lake_actions, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_zone: gapic_v1.method_async.wrap_method( + self.create_zone, + default_timeout=60.0, + client_info=client_info, + ), + self.update_zone: gapic_v1.method_async.wrap_method( + self.update_zone, + default_timeout=60.0, + client_info=client_info, + 
), + self.delete_zone: gapic_v1.method_async.wrap_method( + self.delete_zone, + default_timeout=60.0, + client_info=client_info, + ), + self.list_zones: gapic_v1.method_async.wrap_method( + self.list_zones, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_zone: gapic_v1.method_async.wrap_method( + self.get_zone, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_zone_actions: gapic_v1.method_async.wrap_method( + self.list_zone_actions, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_asset: gapic_v1.method_async.wrap_method( + self.create_asset, + default_timeout=60.0, + client_info=client_info, + ), + self.update_asset: gapic_v1.method_async.wrap_method( + self.update_asset, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_asset: gapic_v1.method_async.wrap_method( + self.delete_asset, + default_timeout=60.0, + client_info=client_info, + ), + self.list_assets: gapic_v1.method_async.wrap_method( + self.list_assets, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_asset: gapic_v1.method_async.wrap_method( + self.get_asset, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_asset_actions: gapic_v1.method_async.wrap_method( + self.list_asset_actions, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_task: gapic_v1.method_async.wrap_method( + self.create_task, + default_timeout=60.0, + client_info=client_info, + ), + self.update_task: gapic_v1.method_async.wrap_method( + self.update_task, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_task: gapic_v1.method_async.wrap_method( + self.delete_task, + default_timeout=60.0, + client_info=client_info, + ), + self.list_tasks: gapic_v1.method_async.wrap_method( + self.list_tasks, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_task: gapic_v1.method_async.wrap_method( + self.get_task, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_jobs: gapic_v1.method_async.wrap_method( + 
self.list_jobs, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.run_task: gapic_v1.method_async.wrap_method( + self.run_task, + default_timeout=None, + client_info=client_info, + ), + self.get_job: gapic_v1.method_async.wrap_method( + self.get_job, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.cancel_job: gapic_v1.method_async.wrap_method( + self.cancel_job, + default_timeout=60.0, + client_info=client_info, + ), + self.create_environment: gapic_v1.method_async.wrap_method( + self.create_environment, + default_timeout=60.0, + client_info=client_info, + ), + self.update_environment: gapic_v1.method_async.wrap_method( + self.update_environment, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_environment: gapic_v1.method_async.wrap_method( + self.delete_environment, + default_timeout=60.0, + client_info=client_info, + ), + self.list_environments: gapic_v1.method_async.wrap_method( + self.list_environments, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_environment: gapic_v1.method_async.wrap_method( + self.get_environment, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_sessions: gapic_v1.method_async.wrap_method( + self.list_sessions, + default_timeout=None, + client_info=client_info, + ), + } + def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/async_client.py index f433e3ea0679..77adbf93dc8d 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/async_client.py @@ -17,6 +17,7 @@ import functools import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -206,7 +207,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, MetadataServiceTransport] = "grpc_asyncio", + transport: Optional[ + Union[ + str, MetadataServiceTransport, Callable[..., MetadataServiceTransport] + ] + ] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -218,9 +223,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.MetadataServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. 
+ transport (Optional[Union[str,MetadataServiceTransport,Callable[..., MetadataServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the MetadataServiceTransport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -340,8 +347,8 @@ async def sample_create_entity(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, entity]) if request is not None and has_flattened_params: raise ValueError( @@ -349,7 +356,10 @@ async def sample_create_entity(): "the individual field arguments should be set." ) - request = metadata_.CreateEntityRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.CreateEntityRequest): + request = metadata_.CreateEntityRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -360,11 +370,9 @@ async def sample_create_entity(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_entity, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_entity + ] # Certain fields should be provided within the metadata header; # add these here. @@ -454,15 +462,16 @@ async def sample_update_entity(): """ # Create or coerce a protobuf request object. - request = metadata_.UpdateEntityRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.UpdateEntityRequest): + request = metadata_.UpdateEntityRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_entity, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_entity + ] # Certain fields should be provided within the metadata header; # add these here. @@ -538,8 +547,8 @@ async def sample_delete_entity(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -547,7 +556,10 @@ async def sample_delete_entity(): "the individual field arguments should be set." ) - request = metadata_.DeleteEntityRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, metadata_.DeleteEntityRequest): + request = metadata_.DeleteEntityRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -556,11 +568,9 @@ async def sample_delete_entity(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_entity, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_entity + ] # Certain fields should be provided within the metadata header; # add these here. @@ -639,8 +649,8 @@ async def sample_get_entity(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -648,7 +658,10 @@ async def sample_get_entity(): "the individual field arguments should be set." ) - request = metadata_.GetEntityRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.GetEntityRequest): + request = metadata_.GetEntityRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -657,20 +670,9 @@ async def sample_get_entity(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_entity, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_entity + ] # Certain fields should be provided within the metadata header; # add these here. @@ -757,8 +759,8 @@ async def sample_list_entities(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -766,7 +768,10 @@ async def sample_list_entities(): "the individual field arguments should be set." ) - request = metadata_.ListEntitiesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.ListEntitiesRequest): + request = metadata_.ListEntitiesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -775,20 +780,9 @@ async def sample_list_entities(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_entities, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_entities + ] # Certain fields should be provided within the metadata header; # add these here. @@ -890,8 +884,8 @@ async def sample_create_partition(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, partition]) if request is not None and has_flattened_params: raise ValueError( @@ -899,7 +893,10 @@ async def sample_create_partition(): "the individual field arguments should be set." ) - request = metadata_.CreatePartitionRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.CreatePartitionRequest): + request = metadata_.CreatePartitionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -910,11 +907,9 @@ async def sample_create_partition(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_partition, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_partition + ] # Certain fields should be provided within the metadata header; # add these here. @@ -990,8 +985,8 @@ async def sample_delete_partition(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -999,7 +994,10 @@ async def sample_delete_partition(): "the individual field arguments should be set." ) - request = metadata_.DeletePartitionRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.DeletePartitionRequest): + request = metadata_.DeletePartitionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1008,11 +1006,9 @@ async def sample_delete_partition(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_partition, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_partition + ] # Certain fields should be provided within the metadata header; # add these here. 
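# The wrap_method blocks removed above now live in the transport's _prep_wrapped_messages
# (see the grpc_asyncio.py hunk later in this diff), so the AsyncRetry/timeout defaults are
# applied once at client construction instead of per call. A minimal sketch of overriding
# those defaults for a single call; the resource names are placeholders and application
# default credentials are assumed.
from google.api_core import exceptions as core_exceptions
from google.api_core import retry_async as retries
from google.cloud import dataplex_v1

async def list_with_custom_retry():
    client = dataplex_v1.MetadataServiceAsyncClient()
    pager = await client.list_partitions(
        parent=(
            "projects/my-project/locations/us-central1/"
            "lakes/my-lake/zones/my-zone/entities/my-entity"
        ),
        # Per-call retry/timeout still take precedence over the cached defaults.
        retry=retries.AsyncRetry(
            initial=0.5,
            maximum=5.0,
            multiplier=2.0,
            predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
            deadline=30.0,
        ),
        timeout=30.0,
    )
    async for partition in pager:
        print(partition.name)

# asyncio.run(list_with_custom_retry())  # requires real Dataplex resources and credentials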
@@ -1094,8 +1090,8 @@ async def sample_get_partition(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1103,7 +1099,10 @@ async def sample_get_partition(): "the individual field arguments should be set." ) - request = metadata_.GetPartitionRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.GetPartitionRequest): + request = metadata_.GetPartitionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1112,20 +1111,9 @@ async def sample_get_partition(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_partition, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_partition + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1211,8 +1199,8 @@ async def sample_list_partitions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -1220,7 +1208,10 @@ async def sample_list_partitions(): "the individual field arguments should be set." ) - request = metadata_.ListPartitionsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.ListPartitionsRequest): + request = metadata_.ListPartitionsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1229,20 +1220,9 @@ async def sample_list_partitions(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_partitions, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_partitions + ] # Certain fields should be provided within the metadata header; # add these here. 
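# The isinstance checks introduced above mean a caller can pass either an already-built
# request object (reused as-is, no copy) or a dict that is coerced into one. A small
# standalone illustration of that pattern; the partition resource name is a placeholder.
from google.cloud import dataplex_v1

def coerce(request):
    # Mirrors the generated pattern shown in the hunks above.
    if not isinstance(request, dataplex_v1.GetPartitionRequest):
        request = dataplex_v1.GetPartitionRequest(request)
    return request

typed = dataplex_v1.GetPartitionRequest(
    name="projects/p/locations/l/lakes/lk/zones/z/entities/e/partitions/pk"
)
assert coerce(typed) is typed  # typed requests are not copied
assert coerce({"name": typed.name}).name == typed.name  # dicts are converted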
diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/client.py index 7667cd4880bf..cc32c2e78f2c 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/client.py @@ -17,6 +17,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -593,7 +594,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, MetadataServiceTransport]] = None, + transport: Optional[ + Union[ + str, MetadataServiceTransport, Callable[..., MetadataServiceTransport] + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -605,9 +610,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, MetadataServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,MetadataServiceTransport,Callable[..., MetadataServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the MetadataServiceTransport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -716,8 +723,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[MetadataServiceTransport], Callable[..., MetadataServiceTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., MetadataServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -805,8 +819,8 @@ def sample_create_entity(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, entity]) if request is not None and has_flattened_params: raise ValueError( @@ -814,10 +828,8 @@ def sample_create_entity(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a metadata_.CreateEntityRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, metadata_.CreateEntityRequest): request = metadata_.CreateEntityRequest(request) # If we have keyword arguments corresponding to fields on the @@ -919,10 +931,8 @@ def sample_update_entity(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a metadata_.UpdateEntityRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, metadata_.UpdateEntityRequest): request = metadata_.UpdateEntityRequest(request) @@ -1004,8 +1014,8 @@ def sample_delete_entity(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1013,10 +1023,8 @@ def sample_delete_entity(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a metadata_.DeleteEntityRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, metadata_.DeleteEntityRequest): request = metadata_.DeleteEntityRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1105,8 +1113,8 @@ def sample_get_entity(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1114,10 +1122,8 @@ def sample_get_entity(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a metadata_.GetEntityRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, metadata_.GetEntityRequest): request = metadata_.GetEntityRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1214,8 +1220,8 @@ def sample_list_entities(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -1223,10 +1229,8 @@ def sample_list_entities(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a metadata_.ListEntitiesRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, metadata_.ListEntitiesRequest): request = metadata_.ListEntitiesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1338,8 +1342,8 @@ def sample_create_partition(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, partition]) if request is not None and has_flattened_params: raise ValueError( @@ -1347,10 +1351,8 @@ def sample_create_partition(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a metadata_.CreatePartitionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, metadata_.CreatePartitionRequest): request = metadata_.CreatePartitionRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1438,8 +1440,8 @@ def sample_delete_partition(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1447,10 +1449,8 @@ def sample_delete_partition(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a metadata_.DeletePartitionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, metadata_.DeletePartitionRequest): request = metadata_.DeletePartitionRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1542,8 +1542,8 @@ def sample_get_partition(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1551,10 +1551,8 @@ def sample_get_partition(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a metadata_.GetPartitionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, metadata_.GetPartitionRequest): request = metadata_.GetPartitionRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1650,8 +1648,8 @@ def sample_list_partitions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -1659,10 +1657,8 @@ def sample_list_partitions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a metadata_.ListPartitionsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, metadata_.ListPartitionsRequest): request = metadata_.ListPartitionsRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/grpc.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/grpc.py index ec1f0bae0360..b8ff166b7bb7 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/grpc.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/grpc.py @@ -55,7 +55,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -75,14 +75,17 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -92,11 +95,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -122,7 +125,7 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. credentials = False # If a channel was explicitly provided, set it. @@ -163,7 +166,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/grpc_asyncio.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/grpc_asyncio.py index 3e9cb8d87b7b..b97bdc2e39f1 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/grpc_asyncio.py @@ -16,7 +16,9 @@ from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union import warnings +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.cloud.location import locations_pb2 # type: ignore @@ -70,7 +72,6 @@ def create_channel( the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. 
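# With the grpc.py change above, ``channel`` may be a callable that the transport invokes
# in place of ``create_channel``, receiving the same arguments it would otherwise pass
# (host plus credentials, scopes, channel options, ...). A hedged sketch of supplying such
# a factory; the keepalive option is purely illustrative and application default
# credentials are assumed.
from google.cloud.dataplex_v1.services.metadata_service import MetadataServiceClient
from google.cloud.dataplex_v1.services.metadata_service.transports import (
    MetadataServiceGrpcTransport,
)

def channel_factory(host, **kwargs):
    # Called by the transport instead of type(self).create_channel, with the
    # arguments described in the updated ``channel`` docstring above.
    kwargs.setdefault("options", []).append(("grpc.keepalive_time_ms", 30000))
    return MetadataServiceGrpcTransport.create_channel(host, **kwargs)

client = MetadataServiceClient(
    transport=MetadataServiceGrpcTransport(channel=channel_factory)
)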
@@ -100,7 +101,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -120,15 +121,18 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -138,11 +142,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -168,7 +172,7 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. credentials = False # If a channel was explicitly provided, set it. 
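# The same idea one level up (see the client.py hunk earlier in this diff): ``transport``
# may be a callable that receives the resolved initialization arguments and returns a
# transport instance. A hedged sketch; application default credentials are assumed and the
# print only shows which arguments arrive.
from google.cloud.dataplex_v1.services.metadata_service import MetadataServiceClient
from google.cloud.dataplex_v1.services.metadata_service.transports import (
    MetadataServiceGrpcTransport,
)

def transport_factory(**kwargs):
    # Receives the same keyword arguments the client would pass to the transport
    # constructor (host, credentials, scopes, client_info, ...), per the docstring above.
    print("building transport for", kwargs.get("host"))
    return MetadataServiceGrpcTransport(**kwargs)

client = MetadataServiceClient(transport=transport_factory)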
@@ -208,7 +212,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, @@ -476,6 +482,92 @@ def list_partitions( ) return self._stubs["list_partitions"] + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_entity: gapic_v1.method_async.wrap_method( + self.create_entity, + default_timeout=60.0, + client_info=client_info, + ), + self.update_entity: gapic_v1.method_async.wrap_method( + self.update_entity, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_entity: gapic_v1.method_async.wrap_method( + self.delete_entity, + default_timeout=60.0, + client_info=client_info, + ), + self.get_entity: gapic_v1.method_async.wrap_method( + self.get_entity, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_entities: gapic_v1.method_async.wrap_method( + self.list_entities, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_partition: gapic_v1.method_async.wrap_method( + self.create_partition, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_partition: gapic_v1.method_async.wrap_method( + self.delete_partition, + default_timeout=60.0, + client_info=client_info, + ), + self.get_partition: gapic_v1.method_async.wrap_method( + self.get_partition, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_partitions: gapic_v1.method_async.wrap_method( + self.list_partitions, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py index 436415d353ff..8f680727ea7c 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py @@ -766,10 +766,16 @@ class Entry(proto.Message): updated. aspects (MutableMapping[str, google.cloud.dataplex_v1.types.Aspect]): Optional. The Aspects attached to the Entry. - The key is either the resource name of the - aspect type (if the aspect is attached directly - to the entry) or "aspectType@path" if the aspect - is attached to an entry's path. + The format for the key can be one of the + following: + + 1. {projectId}.{locationId}.{aspectTypeId} (if + the aspect is attached directly to the + entry) + 2. 
+ {projectId}.{locationId}.{aspectTypeId}@{path} + (if the aspect is attached to an entry's + path) parent_entry (str): Optional. Immutable. The resource name of the parent entry. @@ -1639,14 +1645,17 @@ class ListEntriesRequest(proto.Message): filter (str): Optional. A filter on the entries to return. Filters are case-sensitive. The request can be filtered by the following - fields: entry_type, display_name. The comparison operators - are =, !=, <, >, <=, >= (strings are compared according to - lexical order) The logical operators AND, OR, NOT can be - used in the filter. Example filter expressions: - "display_name=AnExampleDisplayName" + fields: entry_type, entry_source.display_name. The + comparison operators are =, !=, <, >, <=, >= (strings are + compared according to lexical order) The logical operators + AND, OR, NOT can be used in the filter. Wildcard "*" can be + used, but for entry_type the full project id or number needs + to be provided. Example filter expressions: + "entry_source.display_name=AnExampleDisplayName" "entry_type=projects/example-project/locations/global/entryTypes/example-entry_type" - "entry_type=projects/a\* OR "entry_type=projects/k*" "NOT - display_name=AnotherExampleDisplayName". + "entry_type=projects/example-project/locations/us/entryTypes/a* + OR entry_type=projects/another-project/locations/* " "NOT + entry_source.display_name=AnotherExampleDisplayName". """ parent: str = proto.Field( @@ -1836,20 +1845,6 @@ class SearchEntriesResult(proto.Message): r"""A single result of a SearchEntries request. Attributes: - entry (str): - Resource name of the entry. - display_name (str): - Display name. - entry_type (str): - The entry type. - modify_time (google.protobuf.timestamp_pb2.Timestamp): - The last modification timestamp. - fully_qualified_name (str): - Fully qualified name. - description (str): - Entry description. - relative_resource (str): - Relative resource name. linked_resource (str): Linked resource name. dataplex_entry (google.cloud.dataplex_v1.types.Entry): @@ -1873,35 +1868,6 @@ class Snippets(proto.Message): message="Entry", ) - entry: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - entry_type: str = proto.Field( - proto.STRING, - number=3, - ) - modify_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - fully_qualified_name: str = proto.Field( - proto.STRING, - number=5, - ) - description: str = proto.Field( - proto.STRING, - number=6, - ) - relative_resource: str = proto.Field( - proto.STRING, - number=7, - ) linked_resource: str = proto.Field( proto.STRING, number=8, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py index 564f7553f97c..20474172f78b 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py @@ -382,6 +382,11 @@ class DataQualityRuleResult(proto.Message): rule. This field is only valid for row-level type rules. + assertion_row_count (int): + Output only. The number of rows returned by + the sql statement in the SqlAssertion rule. + + This field is only valid for SqlAssertion rules. 
""" rule: "DataQualityRule" = proto.Field( @@ -413,6 +418,10 @@ class DataQualityRuleResult(proto.Message): proto.STRING, number=10, ) + assertion_row_count: int = proto.Field( + proto.INT64, + number=11, + ) class DataQualityDimensionResult(proto.Message): @@ -522,6 +531,11 @@ class DataQualityRule(proto.Message): Aggregate rule which evaluates whether the provided expression is true for a table. + This field is a member of `oneof`_ ``rule_type``. + sql_assertion (google.cloud.dataplex_v1.types.DataQualityRule.SqlAssertion): + Aggregate rule which evaluates the number of + rows returned for the provided statement. + This field is a member of `oneof`_ ``rule_type``. column (str): Optional. The unnested column which this rule @@ -755,6 +769,29 @@ class TableConditionExpectation(proto.Message): number=1, ) + class SqlAssertion(proto.Message): + r"""Queries for rows returned by the provided SQL statement. If any rows + are are returned, this rule fails. + + The SQL statement needs to use BigQuery standard SQL syntax, and + must not contain any semicolons. + + ${data()} can be used to reference the rows being evaluated, i.e. + the table after all additional filters (row filters, incremental + data filters, sampling) are applied. + + Example: SELECT \* FROM ${data()} WHERE price < 0 + + Attributes: + sql_statement (str): + Optional. The SQL statement. + """ + + sql_statement: str = proto.Field( + proto.STRING, + number=1, + ) + range_expectation: RangeExpectation = proto.Field( proto.MESSAGE, number=1, @@ -803,6 +840,12 @@ class TableConditionExpectation(proto.Message): oneof="rule_type", message=TableConditionExpectation, ) + sql_assertion: SqlAssertion = proto.Field( + proto.MESSAGE, + number=202, + oneof="rule_type", + message=SqlAssertion, + ) column: str = proto.Field( proto.STRING, number=500, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/logs.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/logs.py index f095af2f7133..efb223fced23 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/logs.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/logs.py @@ -1137,6 +1137,10 @@ class DataQualityScanRuleResult(proto.Message): null_row_count (int): The number of rows with null values in the specified column. + assertion_row_count (int): + The number of rows returned by the sql + statement in the SqlAssertion rule. This field + is only valid for SqlAssertion rules. """ class RuleType(proto.Enum): @@ -1169,6 +1173,9 @@ class RuleType(proto.Enum): UNIQUENESS_EXPECTATION (8): Please see https://cloud.google.com/dataplex/docs/reference/rest/v1/DataQualityRule#uniquenessexpectation. + SQL_ASSERTION (9): + Please see + https://cloud.google.com/dataplex/docs/reference/rest/v1/DataQualityRule#sqlAssertion. """ RULE_TYPE_UNSPECIFIED = 0 NON_NULL_EXPECTATION = 1 @@ -1179,6 +1186,7 @@ class RuleType(proto.Enum): STATISTIC_RANGE_EXPECTATION = 6 TABLE_CONDITION_EXPECTATION = 7 UNIQUENESS_EXPECTATION = 8 + SQL_ASSERTION = 9 class EvaluationType(proto.Enum): r"""The evaluation type of the data quality rule. 
@@ -1262,6 +1270,10 @@ class Result(proto.Enum): proto.INT64, number=12, ) + assertion_row_count: int = proto.Field( + proto.INT64, + number=13, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py index baa8573fb1e1..08d7b0e99879 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py @@ -1158,6 +1158,9 @@ def test_create_entry_type_empty_call(): with mock.patch.object( type(client.transport.create_entry_type), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_entry_type() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1184,6 +1187,9 @@ def test_create_entry_type_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.create_entry_type), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_entry_type(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1193,6 +1199,47 @@ def test_create_entry_type_non_empty_request_with_auto_populated_field(): ) +def test_create_entry_type_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_entry_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_entry_type + ] = mock_rpc + request = {} + client.create_entry_type(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_entry_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_entry_type_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1216,6 +1263,56 @@ async def test_create_entry_type_empty_call_async(): assert args[0] == catalog.CreateEntryTypeRequest() +@pytest.mark.asyncio +async def test_create_entry_type_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_entry_type + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_entry_type + ] = mock_object + + request = {} + await client.create_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_entry_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_entry_type_async( transport: str = "grpc_asyncio", request_type=catalog.CreateEntryTypeRequest @@ -1472,6 +1569,9 @@ def test_update_entry_type_empty_call(): with mock.patch.object( type(client.transport.update_entry_type), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_entry_type() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1495,12 +1595,56 @@ def test_update_entry_type_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.update_entry_type), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.update_entry_type(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == catalog.UpdateEntryTypeRequest() +def test_update_entry_type_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_entry_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_entry_type + ] = mock_rpc + request = {} + client.update_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_entry_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_update_entry_type_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1524,6 +1668,56 @@ async def test_update_entry_type_empty_call_async(): assert args[0] == catalog.UpdateEntryTypeRequest() +@pytest.mark.asyncio +async def test_update_entry_type_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_entry_type + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_entry_type + ] = mock_object + + request = {} + await client.update_entry_type(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_entry_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_entry_type_async( transport: str = "grpc_asyncio", request_type=catalog.UpdateEntryTypeRequest @@ -1770,6 +1964,9 @@ def test_delete_entry_type_empty_call(): with mock.patch.object( type(client.transport.delete_entry_type), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_entry_type() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1796,6 +1993,9 @@ def test_delete_entry_type_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.delete_entry_type), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_entry_type(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1805,6 +2005,47 @@ def test_delete_entry_type_non_empty_request_with_auto_populated_field(): ) +def test_delete_entry_type_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_entry_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_entry_type + ] = mock_rpc + request = {} + client.delete_entry_type(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_entry_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_entry_type_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1828,6 +2069,56 @@ async def test_delete_entry_type_empty_call_async(): assert args[0] == catalog.DeleteEntryTypeRequest() +@pytest.mark.asyncio +async def test_delete_entry_type_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_entry_type + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_entry_type + ] = mock_object + + request = {} + await client.delete_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_entry_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_entry_type_async( transport: str = "grpc_asyncio", request_type=catalog.DeleteEntryTypeRequest @@ -2065,6 +2356,9 @@ def test_list_entry_types_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_entry_types), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_entry_types() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2091,6 +2385,9 @@ def test_list_entry_types_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_entry_types), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_entry_types(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2102,6 +2399,43 @@ def test_list_entry_types_non_empty_request_with_auto_populated_field(): ) +def test_list_entry_types_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_entry_types in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_entry_types + ] = mock_rpc + request = {} + client.list_entry_types(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_entry_types(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_entry_types_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2126,6 +2460,52 @@ async def test_list_entry_types_empty_call_async(): assert args[0] == catalog.ListEntryTypesRequest() +@pytest.mark.asyncio +async def test_list_entry_types_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_entry_types + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_entry_types + ] = mock_object + + request = {} + await client.list_entry_types(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_entry_types(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_entry_types_async( transport: str = "grpc_asyncio", request_type=catalog.ListEntryTypesRequest @@ -2560,6 +2940,9 @@ def test_get_entry_type_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_entry_type), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.get_entry_type() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2583,6 +2966,9 @@ def test_get_entry_type_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_entry_type), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_entry_type(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2591,6 +2977,41 @@ def test_get_entry_type_non_empty_request_with_auto_populated_field(): ) +def test_get_entry_type_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_entry_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_entry_type] = mock_rpc + request = {} + client.get_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_entry_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_entry_type_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2621,6 +3042,52 @@ async def test_get_entry_type_empty_call_async(): assert args[0] == catalog.GetEntryTypeRequest() +@pytest.mark.asyncio +async def test_get_entry_type_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_entry_type + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_entry_type + ] = mock_object + + request = {} + await client.get_entry_type(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_entry_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_entry_type_async( transport: str = "grpc_asyncio", request_type=catalog.GetEntryTypeRequest @@ -2860,6 +3327,9 @@ def test_create_aspect_type_empty_call(): with mock.patch.object( type(client.transport.create_aspect_type), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_aspect_type() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2886,6 +3356,9 @@ def test_create_aspect_type_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.create_aspect_type), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_aspect_type(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2895,6 +3368,49 @@ def test_create_aspect_type_non_empty_request_with_auto_populated_field(): ) +def test_create_aspect_type_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_aspect_type in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_aspect_type + ] = mock_rpc + request = {} + client.create_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_aspect_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_aspect_type_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2918,6 +3434,56 @@ async def test_create_aspect_type_empty_call_async(): assert args[0] == catalog.CreateAspectTypeRequest() +@pytest.mark.asyncio +async def test_create_aspect_type_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_aspect_type + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_aspect_type + ] = mock_object + + request = {} + await client.create_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_aspect_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_aspect_type_async( transport: str = "grpc_asyncio", request_type=catalog.CreateAspectTypeRequest @@ -3174,6 +3740,9 @@ def test_update_aspect_type_empty_call(): with mock.patch.object( type(client.transport.update_aspect_type), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_aspect_type() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3197,12 +3766,58 @@ def test_update_aspect_type_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.update_aspect_type), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.update_aspect_type(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == catalog.UpdateAspectTypeRequest() +def test_update_aspect_type_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_aspect_type in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_aspect_type + ] = mock_rpc + request = {} + client.update_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_aspect_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_update_aspect_type_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3227,11 +3842,61 @@ async def test_update_aspect_type_empty_call_async(): @pytest.mark.asyncio -async def test_update_aspect_type_async( - transport: str = "grpc_asyncio", request_type=catalog.UpdateAspectTypeRequest +async def test_update_aspect_type_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - client = CatalogServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_aspect_type + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_aspect_type + ] = mock_object + + request = {} + await client.update_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_aspect_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_aspect_type_async( + transport: str = "grpc_asyncio", request_type=catalog.UpdateAspectTypeRequest +): + client = CatalogServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3472,6 +4137,9 @@ def test_delete_aspect_type_empty_call(): with mock.patch.object( type(client.transport.delete_aspect_type), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_aspect_type() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3498,6 +4166,9 @@ def test_delete_aspect_type_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.delete_aspect_type), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_aspect_type(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3507,6 +4178,49 @@ def test_delete_aspect_type_non_empty_request_with_auto_populated_field(): ) +def test_delete_aspect_type_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_aspect_type in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_aspect_type + ] = mock_rpc + request = {} + client.delete_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_aspect_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_aspect_type_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3530,6 +4244,56 @@ async def test_delete_aspect_type_empty_call_async(): assert args[0] == catalog.DeleteAspectTypeRequest() +@pytest.mark.asyncio +async def test_delete_aspect_type_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_aspect_type + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_aspect_type + ] = mock_object + + request = {} + await client.delete_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_aspect_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_aspect_type_async( transport: str = "grpc_asyncio", request_type=catalog.DeleteAspectTypeRequest @@ -3771,6 +4535,9 @@ def test_list_aspect_types_empty_call(): with mock.patch.object( type(client.transport.list_aspect_types), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_aspect_types() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3799,6 +4566,9 @@ def test_list_aspect_types_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.list_aspect_types), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_aspect_types(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3810,6 +4580,43 @@ def test_list_aspect_types_non_empty_request_with_auto_populated_field(): ) +def test_list_aspect_types_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_aspect_types in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_aspect_types + ] = mock_rpc + request = {} + client.list_aspect_types(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_aspect_types(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_aspect_types_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3836,6 +4643,52 @@ async def test_list_aspect_types_empty_call_async(): assert args[0] == catalog.ListAspectTypesRequest() +@pytest.mark.asyncio +async def test_list_aspect_types_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_aspect_types + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_aspect_types + ] = mock_object + + request = {} + await client.list_aspect_types(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_aspect_types(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_aspect_types_async( transport: str = "grpc_asyncio", request_type=catalog.ListAspectTypesRequest @@ -4284,6 +5137,9 @@ def test_get_aspect_type_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_aspect_type), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.get_aspect_type() call.assert_called() _, args, _ = call.mock_calls[0] @@ -4307,6 +5163,9 @@ def test_get_aspect_type_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_aspect_type), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_aspect_type(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -4315,6 +5174,41 @@ def test_get_aspect_type_non_empty_request_with_auto_populated_field(): ) +def test_get_aspect_type_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_aspect_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_aspect_type] = mock_rpc + request = {} + client.get_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_aspect_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_aspect_type_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -4343,6 +5237,52 @@ async def test_get_aspect_type_empty_call_async(): assert args[0] == catalog.GetAspectTypeRequest() +@pytest.mark.asyncio +async def test_get_aspect_type_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_aspect_type + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_aspect_type + ] = mock_object + + request = {} + await client.get_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_aspect_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_aspect_type_async( transport: str = "grpc_asyncio", request_type=catalog.GetAspectTypeRequest @@ -4578,6 +5518,9 @@ def test_create_entry_group_empty_call(): with mock.patch.object( type(client.transport.create_entry_group), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_entry_group() call.assert_called() _, args, _ = call.mock_calls[0] @@ -4604,6 +5547,9 @@ def test_create_entry_group_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.create_entry_group), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_entry_group(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -4613,6 +5559,49 @@ def test_create_entry_group_non_empty_request_with_auto_populated_field(): ) +def test_create_entry_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_entry_group in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_entry_group + ] = mock_rpc + request = {} + client.create_entry_group(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_entry_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_entry_group_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -4636,6 +5625,56 @@ async def test_create_entry_group_empty_call_async(): assert args[0] == catalog.CreateEntryGroupRequest() +@pytest.mark.asyncio +async def test_create_entry_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_entry_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_entry_group + ] = mock_object + + request = {} + await client.create_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_entry_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_entry_group_async( transport: str = "grpc_asyncio", request_type=catalog.CreateEntryGroupRequest @@ -4892,6 +5931,9 @@ def test_update_entry_group_empty_call(): with mock.patch.object( type(client.transport.update_entry_group), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_entry_group() call.assert_called() _, args, _ = call.mock_calls[0] @@ -4915,12 +5957,58 @@ def test_update_entry_group_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.update_entry_group), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.update_entry_group(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == catalog.UpdateEntryGroupRequest() +def test_update_entry_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_entry_group in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_entry_group + ] = mock_rpc + request = {} + client.update_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_entry_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_update_entry_group_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -4944,6 +6032,56 @@ async def test_update_entry_group_empty_call_async(): assert args[0] == catalog.UpdateEntryGroupRequest() +@pytest.mark.asyncio +async def test_update_entry_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_entry_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_entry_group + ] = mock_object + + request = {} + await client.update_entry_group(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_entry_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_entry_group_async( transport: str = "grpc_asyncio", request_type=catalog.UpdateEntryGroupRequest @@ -5190,6 +6328,9 @@ def test_delete_entry_group_empty_call(): with mock.patch.object( type(client.transport.delete_entry_group), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_entry_group() call.assert_called() _, args, _ = call.mock_calls[0] @@ -5216,6 +6357,9 @@ def test_delete_entry_group_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.delete_entry_group), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_entry_group(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -5225,6 +6369,49 @@ def test_delete_entry_group_non_empty_request_with_auto_populated_field(): ) +def test_delete_entry_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_entry_group in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_entry_group + ] = mock_rpc + request = {} + client.delete_entry_group(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_entry_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_entry_group_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -5249,20 +6436,70 @@ async def test_delete_entry_group_empty_call_async(): @pytest.mark.asyncio -async def test_delete_entry_group_async( - transport: str = "grpc_asyncio", request_type=catalog.DeleteEntryGroupRequest +async def test_delete_entry_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - client = CatalogServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( + # Ensure method has been cached + assert ( + client._client._transport.delete_entry_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_entry_group + ] = mock_object + + request = {} + await client.delete_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_entry_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_entry_group_async( + transport: str = "grpc_asyncio", request_type=catalog.DeleteEntryGroupRequest +): + client = CatalogServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( type(client.transport.delete_entry_group), "__call__" ) as call: # Designate an appropriate return value for the call. @@ -5489,6 +6726,9 @@ def test_list_entry_groups_empty_call(): with mock.patch.object( type(client.transport.list_entry_groups), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_entry_groups() call.assert_called() _, args, _ = call.mock_calls[0] @@ -5517,6 +6757,9 @@ def test_list_entry_groups_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.list_entry_groups), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_entry_groups(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -5528,6 +6771,43 @@ def test_list_entry_groups_non_empty_request_with_auto_populated_field(): ) +def test_list_entry_groups_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_entry_groups in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_entry_groups + ] = mock_rpc + request = {} + client.list_entry_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_entry_groups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_entry_groups_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -5554,6 +6834,52 @@ async def test_list_entry_groups_empty_call_async(): assert args[0] == catalog.ListEntryGroupsRequest() +@pytest.mark.asyncio +async def test_list_entry_groups_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_entry_groups + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_entry_groups + ] = mock_object + + request = {} + await client.list_entry_groups(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_entry_groups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_entry_groups_async( transport: str = "grpc_asyncio", request_type=catalog.ListEntryGroupsRequest @@ -6002,6 +7328,9 @@ def test_get_entry_group_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_entry_group() call.assert_called() _, args, _ = call.mock_calls[0] @@ -6025,6 +7354,9 @@ def test_get_entry_group_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_entry_group(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -6033,6 +7365,41 @@ def test_get_entry_group_non_empty_request_with_auto_populated_field(): ) +def test_get_entry_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_entry_group in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_entry_group] = mock_rpc + request = {} + client.get_entry_group(request) + + # Establish that the underlying gRPC stub method was called. 
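
A condensed, standalone sketch of the synchronous `*_use_cached_wrapped_rpc` pattern exercised in these hunks may help when skimming: it is not an additional test in the patch, just an illustration of the same checks, using `get_entry_group` as the example RPC and relying on the same private `client._transport._wrapped_methods` cache the generated tests poke at (the helper name `check_cached_wrapped_rpc` is made up for the sketch).

    from unittest import mock

    from google.auth import credentials as ga_credentials
    from google.cloud.dataplex_v1.services.catalog_service import CatalogServiceClient


    def check_cached_wrapped_rpc():
        # Count how many times gapic_v1.method.wrap_method gets invoked.
        with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
            client = CatalogServiceClient(
                credentials=ga_credentials.AnonymousCredentials(),
                transport="grpc",
            )

            # Every RPC is wrapped once, eagerly, when the client is constructed.
            assert wrapper_fn.call_count > 0
            wrapper_fn.reset_mock()

            # The transport keeps the wrapped callables keyed by the raw stub method.
            assert client._transport.get_entry_group in client._transport._wrapped_methods

            # Swap in a plain mock and call twice: the stub-level mock runs on each
            # call, but wrap_method is never invoked again.
            mock_rpc = mock.Mock()
            client._transport._wrapped_methods[client._transport.get_entry_group] = mock_rpc
            client.get_entry_group({})
            client.get_entry_group({})
            assert mock_rpc.call_count == 2
            assert wrapper_fn.call_count == 0
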
+ assert mock_rpc.call_count == 1 + + client.get_entry_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_entry_group_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -6061,6 +7428,52 @@ async def test_get_entry_group_empty_call_async(): assert args[0] == catalog.GetEntryGroupRequest() +@pytest.mark.asyncio +async def test_get_entry_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_entry_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_entry_group + ] = mock_object + + request = {} + await client.get_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_entry_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_entry_group_async( transport: str = "grpc_asyncio", request_type=catalog.GetEntryGroupRequest @@ -6301,6 +7714,9 @@ def test_create_entry_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_entry), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_entry() call.assert_called() _, args, _ = call.mock_calls[0] @@ -6325,6 +7741,9 @@ def test_create_entry_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_entry), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.create_entry(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -6334,6 +7753,41 @@ def test_create_entry_non_empty_request_with_auto_populated_field(): ) +def test_create_entry_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_entry in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_entry] = mock_rpc + request = {} + client.create_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_entry(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_entry_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -6360,6 +7814,52 @@ async def test_create_entry_empty_call_async(): assert args[0] == catalog.CreateEntryRequest() +@pytest.mark.asyncio +async def test_create_entry_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_entry + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_entry + ] = mock_object + + request = {} + await client.create_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.create_entry(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_entry_async( transport: str = "grpc_asyncio", request_type=catalog.CreateEntryRequest @@ -6616,6 +8116,9 @@ def test_update_entry_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_entry), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.update_entry() call.assert_called() _, args, _ = call.mock_calls[0] @@ -6637,12 +8140,50 @@ def test_update_entry_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_entry), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_entry(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == catalog.UpdateEntryRequest() +def test_update_entry_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_entry in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_entry] = mock_rpc + request = {} + client.update_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_entry(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_update_entry_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -6669,6 +8210,52 @@ async def test_update_entry_empty_call_async(): assert args[0] == catalog.UpdateEntryRequest() +@pytest.mark.asyncio +async def test_update_entry_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_entry + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_entry + ] = mock_object + + request = {} + await client.update_entry(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.update_entry(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_entry_async( transport: str = "grpc_asyncio", request_type=catalog.UpdateEntryRequest @@ -6915,6 +8502,9 @@ def test_delete_entry_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_entry() call.assert_called() _, args, _ = call.mock_calls[0] @@ -6938,6 +8528,9 @@ def test_delete_entry_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_entry(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -6946,6 +8539,41 @@ def test_delete_entry_non_empty_request_with_auto_populated_field(): ) +def test_delete_entry_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_entry in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_entry] = mock_rpc + request = {} + client.delete_entry(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_entry(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_entry_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -6972,6 +8600,52 @@ async def test_delete_entry_empty_call_async(): assert args[0] == catalog.DeleteEntryRequest() +@pytest.mark.asyncio +async def test_delete_entry_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_entry + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_entry + ] = mock_object + + request = {} + await client.delete_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.delete_entry(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_entry_async( transport: str = "grpc_asyncio", request_type=catalog.DeleteEntryRequest @@ -7202,6 +8876,9 @@ def test_list_entries_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_entries), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_entries() call.assert_called() _, args, _ = call.mock_calls[0] @@ -7227,6 +8904,9 @@ def test_list_entries_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_entries), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_entries(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -7237,6 +8917,41 @@ def test_list_entries_non_empty_request_with_auto_populated_field(): ) +def test_list_entries_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_entries in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_entries] = mock_rpc + request = {} + client.list_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_entries_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -7260,6 +8975,52 @@ async def test_list_entries_empty_call_async(): assert args[0] == catalog.ListEntriesRequest() +@pytest.mark.asyncio +async def test_list_entries_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_entries + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_entries + ] = mock_object + + request = {} + await client.list_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_entries_async( transport: str = "grpc_asyncio", request_type=catalog.ListEntriesRequest @@ -7684,6 +9445,9 @@ def test_get_entry_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_entry), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.get_entry() call.assert_called() _, args, _ = call.mock_calls[0] @@ -7707,6 +9471,9 @@ def test_get_entry_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_entry), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_entry(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -7715,6 +9482,41 @@ def test_get_entry_non_empty_request_with_auto_populated_field(): ) +def test_get_entry_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_entry in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_entry] = mock_rpc + request = {} + client.get_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_entry(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_entry_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -7741,6 +9543,50 @@ async def test_get_entry_empty_call_async(): assert args[0] == catalog.GetEntryRequest() +@pytest.mark.asyncio +async def test_get_entry_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_entry + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_entry + ] = mock_object + + request = {} + await client.get_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_entry(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_entry_async( transport: str = "grpc_asyncio", request_type=catalog.GetEntryRequest @@ -7977,6 +9823,9 @@ def test_lookup_entry_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.lookup_entry), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.lookup_entry() call.assert_called() _, args, _ = call.mock_calls[0] @@ -8001,6 +9850,9 @@ def test_lookup_entry_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.lookup_entry), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.lookup_entry(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -8010,6 +9862,41 @@ def test_lookup_entry_non_empty_request_with_auto_populated_field(): ) +def test_lookup_entry_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.lookup_entry in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.lookup_entry] = mock_rpc + request = {} + client.lookup_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.lookup_entry(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_lookup_entry_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -8036,6 +9923,52 @@ async def test_lookup_entry_empty_call_async(): assert args[0] == catalog.LookupEntryRequest() +@pytest.mark.asyncio +async def test_lookup_entry_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.lookup_entry + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.lookup_entry + ] = mock_object + + request = {} + await client.lookup_entry(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.lookup_entry(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_lookup_entry_async( transport: str = "grpc_asyncio", request_type=catalog.LookupEntryRequest @@ -8190,6 +10123,9 @@ def test_search_entries_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.search_entries), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.search_entries() call.assert_called() _, args, _ = call.mock_calls[0] @@ -8217,6 +10153,9 @@ def test_search_entries_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.search_entries), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.search_entries(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -8229,6 +10168,41 @@ def test_search_entries_non_empty_request_with_auto_populated_field(): ) +def test_search_entries_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.search_entries in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.search_entries] = mock_rpc + request = {} + client.search_entries(request) + + # Establish that the underlying gRPC stub method was called. 
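
The asynchronous `*_async_use_cached_wrapped_rpc` cases above replace the cached entry with a small `AwaitableMock` helper, an `AsyncMock` whose instances can also be awaited directly and complete immediately with no value. A minimal standalone illustration of just that helper follows (separate from the patch itself; `demo` is a made-up name for the sketch).

    import asyncio
    from unittest import mock


    class AwaitableMock(mock.AsyncMock):
        # Same shape as the helper in the tests above: awaiting the mock itself
        # records the await and finishes immediately with no result.
        def __await__(self):
            self.await_count += 1
            return iter([])


    async def demo():
        rpc = AwaitableMock()

        # It still behaves like an AsyncMock when called and awaited...
        await rpc(request={})
        assert rpc.call_count == 1

        # ...and the instance itself can be awaited as well.
        await rpc
        assert rpc.await_count == 2


    asyncio.run(demo())
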
+ assert mock_rpc.call_count == 1 + + client.search_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_search_entries_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -8254,6 +10228,52 @@ async def test_search_entries_empty_call_async(): assert args[0] == catalog.SearchEntriesRequest() +@pytest.mark.asyncio +async def test_search_entries_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.search_entries + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.search_entries + ] = mock_object + + request = {} + await client.search_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.search_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_search_entries_async( transport: str = "grpc_asyncio", request_type=catalog.SearchEntriesRequest diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_content_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_content_service.py index a50a858cb318..0805a486c96b 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_content_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_content_service.py @@ -1156,6 +1156,9 @@ def test_create_content_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_content), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_content() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1179,6 +1182,9 @@ def test_create_content_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_content), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.create_content(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1187,6 +1193,41 @@ def test_create_content_non_empty_request_with_auto_populated_field(): ) +def test_create_content_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_content in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_content] = mock_rpc + request = {} + client.create_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_content_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1213,6 +1254,52 @@ async def test_create_content_empty_call_async(): assert args[0] == gcd_content.CreateContentRequest() +@pytest.mark.asyncio +async def test_create_content_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ContentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_content + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_content + ] = mock_object + + request = {} + await client.create_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.create_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_content_async( transport: str = "grpc_asyncio", request_type=gcd_content.CreateContentRequest @@ -1460,6 +1547,9 @@ def test_update_content_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_content), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.update_content() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1481,12 +1571,50 @@ def test_update_content_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_content), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_content(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == gcd_content.UpdateContentRequest() +def test_update_content_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_content in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_content] = mock_rpc + request = {} + client.update_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_update_content_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1513,6 +1641,52 @@ async def test_update_content_empty_call_async(): assert args[0] == gcd_content.UpdateContentRequest() +@pytest.mark.asyncio +async def test_update_content_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ContentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_content + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_content + ] = mock_object + + request = {} + await client.update_content(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.update_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_content_async( transport: str = "grpc_asyncio", request_type=gcd_content.UpdateContentRequest @@ -1750,6 +1924,9 @@ def test_delete_content_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_content), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_content() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1773,6 +1950,9 @@ def test_delete_content_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_content), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_content(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1781,6 +1961,41 @@ def test_delete_content_non_empty_request_with_auto_populated_field(): ) +def test_delete_content_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_content in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_content] = mock_rpc + request = {} + client.delete_content(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_content_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1800,6 +2015,52 @@ async def test_delete_content_empty_call_async(): assert args[0] == content.DeleteContentRequest() +@pytest.mark.asyncio +async def test_delete_content_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ContentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_content + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_content + ] = mock_object + + request = {} + await client.delete_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.delete_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_content_async( transport: str = "grpc_asyncio", request_type=content.DeleteContentRequest @@ -2026,6 +2287,9 @@ def test_get_content_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_content), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_content() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2049,6 +2313,9 @@ def test_get_content_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_content), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.get_content(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2057,6 +2324,41 @@ def test_get_content_non_empty_request_with_auto_populated_field(): ) +def test_get_content_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_content in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_content] = mock_rpc + request = {} + client.get_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_content_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2083,6 +2385,52 @@ async def test_get_content_empty_call_async(): assert args[0] == content.GetContentRequest() +@pytest.mark.asyncio +async def test_get_content_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ContentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_content + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_content + ] = mock_object + + request = {} + await client.get_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_content_async( transport: str = "grpc_asyncio", request_type=content.GetContentRequest @@ -2315,6 +2663,9 @@ def test_get_iam_policy_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.get_iam_policy() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2338,6 +2689,9 @@ def test_get_iam_policy_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_iam_policy(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2346,6 +2700,41 @@ def test_get_iam_policy_non_empty_request_with_auto_populated_field(): ) +def test_get_iam_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_iam_policy_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2370,6 +2759,52 @@ async def test_get_iam_policy_empty_call_async(): assert args[0] == iam_policy_pb2.GetIamPolicyRequest() +@pytest.mark.asyncio +async def test_get_iam_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ContentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_iam_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_iam_policy + ] = mock_object + + request = {} + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_iam_policy_async( transport: str = "grpc_asyncio", request_type=iam_policy_pb2.GetIamPolicyRequest @@ -2615,6 +3050,9 @@ def test_set_iam_policy_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.set_iam_policy() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2638,6 +3076,9 @@ def test_set_iam_policy_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.set_iam_policy(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2646,6 +3087,41 @@ def test_set_iam_policy_non_empty_request_with_auto_populated_field(): ) +def test_set_iam_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_set_iam_policy_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2670,6 +3146,52 @@ async def test_set_iam_policy_empty_call_async(): assert args[0] == iam_policy_pb2.SetIamPolicyRequest() +@pytest.mark.asyncio +async def test_set_iam_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ContentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.set_iam_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.set_iam_policy + ] = mock_object + + request = {} + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_set_iam_policy_async( transport: str = "grpc_asyncio", request_type=iam_policy_pb2.SetIamPolicyRequest @@ -2838,6 +3360,9 @@ def test_test_iam_permissions_empty_call(): with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.test_iam_permissions() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2863,6 +3388,9 @@ def test_test_iam_permissions_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.test_iam_permissions(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2871,6 +3399,45 @@ def test_test_iam_permissions_non_empty_request_with_auto_populated_field(): ) +def test_test_iam_permissions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_test_iam_permissions_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2896,6 +3463,52 @@ async def test_test_iam_permissions_empty_call_async(): assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() +@pytest.mark.asyncio +async def test_test_iam_permissions_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ContentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.test_iam_permissions + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.test_iam_permissions + ] = mock_object + + request = {} + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_test_iam_permissions_async( transport: str = "grpc_asyncio", @@ -3068,6 +3681,9 @@ def test_list_content_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_content), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_content() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3093,6 +3709,9 @@ def test_list_content_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_content), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_content(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3103,6 +3722,41 @@ def test_list_content_non_empty_request_with_auto_populated_field(): ) +def test_list_content_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_content in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_content] = mock_rpc + request = {} + client.list_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_content_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3126,6 +3780,52 @@ async def test_list_content_empty_call_async(): assert args[0] == content.ListContentRequest() +@pytest.mark.asyncio +async def test_list_content_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ContentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_content + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_content + ] = mock_object + + request = {} + await client.list_content(request) + + # Establish that the underlying gRPC stub method was called. 
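Every `*_use_cached_wrapped_rpc` test asserts the same contract: `wrap_method` runs once per RPC when the transport is built, and each later call is only a dictionary lookup in `_wrapped_methods`. A simplified sketch of that contract with stand-in names (`wrap_method`, `FakeTransport`, and `FakeClient` here are not the generated classes):

def wrap_method(func, default_timeout=None):
    # Stand-in for google.api_core.gapic_v1.method.wrap_method.
    def wrapper(request, timeout=default_timeout, **kwargs):
        return func(request, timeout=timeout, **kwargs)
    return wrapper

class FakeTransport:
    def list_content(self, request, timeout=None):
        # Stand-in for the gRPC stub method.
        return {"request": request, "timeout": timeout}

    def _prep_wrapped_messages(self):
        # Wrap each stub method exactly once, keyed by the bound method itself.
        self._wrapped_methods = {
            self.list_content: wrap_method(self.list_content, default_timeout=60.0),
        }

class FakeClient:
    def __init__(self):
        self._transport = FakeTransport()
        self._transport._prep_wrapped_messages()

    def list_content(self, request):
        # Cached lookup; nothing is re-wrapped per call, which is what the
        # wrapper_fn.call_count == 0 assertions above verify.
        rpc = self._transport._wrapped_methods[self._transport.list_content]
        return rpc(request)

client = FakeClient()
assert client.list_content({"parent": "projects/p"})["timeout"] == 60.0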
+ assert mock_object.call_count == 1 + + await client.list_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_content_async( transport: str = "grpc_asyncio", request_type=content.ListContentRequest diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py index 10bd64c8db3a..6c558a379755 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py @@ -1174,6 +1174,9 @@ def test_create_data_scan_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_data_scan), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_data_scan() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1198,6 +1201,9 @@ def test_create_data_scan_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_data_scan), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_data_scan(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1207,6 +1213,47 @@ def test_create_data_scan_non_empty_request_with_auto_populated_field(): ) +def test_create_data_scan_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_data_scan in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_data_scan + ] = mock_rpc + request = {} + client.create_data_scan(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_data_scan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_data_scan_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1228,6 +1275,56 @@ async def test_create_data_scan_empty_call_async(): assert args[0] == datascans.CreateDataScanRequest() +@pytest.mark.asyncio +async def test_create_data_scan_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataScanServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_data_scan + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_data_scan + ] = mock_object + + request = {} + await client.create_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_data_scan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_data_scan_async( transport: str = "grpc_asyncio", request_type=datascans.CreateDataScanRequest @@ -1470,6 +1567,9 @@ def test_update_data_scan_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_data_scan), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_data_scan() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1491,12 +1591,56 @@ def test_update_data_scan_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_data_scan), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.update_data_scan(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == datascans.UpdateDataScanRequest() +def test_update_data_scan_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_data_scan in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_data_scan + ] = mock_rpc + request = {} + client.update_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_data_scan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_update_data_scan_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1518,6 +1662,56 @@ async def test_update_data_scan_empty_call_async(): assert args[0] == datascans.UpdateDataScanRequest() +@pytest.mark.asyncio +async def test_update_data_scan_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataScanServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_data_scan + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_data_scan + ] = mock_object + + request = {} + await client.update_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_data_scan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_data_scan_async( transport: str = "grpc_asyncio", request_type=datascans.UpdateDataScanRequest @@ -1750,6 +1944,9 @@ def test_delete_data_scan_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_data_scan), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_data_scan() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1773,6 +1970,9 @@ def test_delete_data_scan_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_data_scan), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_data_scan(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1781,6 +1981,47 @@ def test_delete_data_scan_non_empty_request_with_auto_populated_field(): ) +def test_delete_data_scan_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_data_scan in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_data_scan + ] = mock_rpc + request = {} + client.delete_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_data_scan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_data_scan_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1802,6 +2043,56 @@ async def test_delete_data_scan_empty_call_async(): assert args[0] == datascans.DeleteDataScanRequest() +@pytest.mark.asyncio +async def test_delete_data_scan_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataScanServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_data_scan + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_data_scan + ] = mock_object + + request = {} + await client.delete_data_scan(request) + + # Establish that the underlying gRPC stub method was called. 
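The async variants replace the cached entry with an `AwaitableMock` subclass of `mock.AsyncMock`. That works because any object whose `__await__` returns an iterator can be awaited, and an already-exhausted iterator makes the `await` complete immediately with `None`. A self-contained sketch of just that mechanism:

import asyncio
from unittest import mock

class AwaitableMock(mock.AsyncMock):
    def __await__(self):
        # Record the await and finish at once: an empty iterator resumes the
        # awaiting coroutine immediately with a result of None.
        self.await_count += 1
        return iter([])

async def demo():
    fake = AwaitableMock()
    result = await fake
    assert result is None         # completed without suspending
    assert fake.await_count == 1  # the override recorded the await

asyncio.run(demo())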
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_data_scan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_data_scan_async( transport: str = "grpc_asyncio", request_type=datascans.DeleteDataScanRequest @@ -2037,6 +2328,9 @@ def test_get_data_scan_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_data_scan), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_data_scan() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2060,6 +2354,9 @@ def test_get_data_scan_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_data_scan), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_data_scan(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2068,6 +2365,41 @@ def test_get_data_scan_non_empty_request_with_auto_populated_field(): ) +def test_get_data_scan_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_data_scan in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_data_scan] = mock_rpc + request = {} + client.get_data_scan(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_data_scan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_data_scan_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2096,6 +2428,52 @@ async def test_get_data_scan_empty_call_async(): assert args[0] == datascans.GetDataScanRequest() +@pytest.mark.asyncio +async def test_get_data_scan_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataScanServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_data_scan + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_data_scan + ] = mock_object + + request = {} + await client.get_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_data_scan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_data_scan_async( transport: str = "grpc_asyncio", request_type=datascans.GetDataScanRequest @@ -2332,6 +2710,9 @@ def test_list_data_scans_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_data_scans), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_data_scans() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2358,6 +2739,9 @@ def test_list_data_scans_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_data_scans), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_data_scans(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2369,6 +2753,41 @@ def test_list_data_scans_non_empty_request_with_auto_populated_field(): ) +def test_list_data_scans_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_data_scans in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_data_scans] = mock_rpc + request = {} + client.list_data_scans(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_data_scans(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_data_scans_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2393,6 +2812,52 @@ async def test_list_data_scans_empty_call_async(): assert args[0] == datascans.ListDataScansRequest() +@pytest.mark.asyncio +async def test_list_data_scans_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataScanServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_data_scans + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_data_scans + ] = mock_object + + request = {} + await client.list_data_scans(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_data_scans(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_data_scans_async( transport: str = "grpc_asyncio", request_type=datascans.ListDataScansRequest @@ -2810,6 +3275,9 @@ def test_run_data_scan_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.run_data_scan), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.run_data_scan() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2833,6 +3301,9 @@ def test_run_data_scan_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.run_data_scan), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.run_data_scan(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2841,6 +3312,41 @@ def test_run_data_scan_non_empty_request_with_auto_populated_field(): ) +def test_run_data_scan_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.run_data_scan in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.run_data_scan] = mock_rpc + request = {} + client.run_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.run_data_scan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_run_data_scan_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2862,6 +3368,52 @@ async def test_run_data_scan_empty_call_async(): assert args[0] == datascans.RunDataScanRequest() +@pytest.mark.asyncio +async def test_run_data_scan_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataScanServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.run_data_scan + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.run_data_scan + ] = mock_object + + request = {} + await client.run_data_scan(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.run_data_scan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_run_data_scan_async( transport: str = "grpc_asyncio", request_type=datascans.RunDataScanRequest @@ -3099,6 +3651,9 @@ def test_get_data_scan_job_empty_call(): with mock.patch.object( type(client.transport.get_data_scan_job), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_data_scan_job() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3124,6 +3679,9 @@ def test_get_data_scan_job_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.get_data_scan_job), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_data_scan_job(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3132,6 +3690,43 @@ def test_get_data_scan_job_non_empty_request_with_auto_populated_field(): ) +def test_get_data_scan_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_data_scan_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_data_scan_job + ] = mock_rpc + request = {} + client.get_data_scan_job(request) + + # Establish that the underlying gRPC stub method was called. 
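These tests also depend on `mock.patch` with a dotted import path: for the duration of the `with` block the named attribute is swapped for a `MagicMock`, so `wrapper_fn.call_count` measures how many wrappers client construction created. A tiny illustration patching a standard-library function (chosen only for the demo):

import json
from unittest import mock

with mock.patch("json.dumps") as patched:
    # Inside the block, calls through the normal name hit the MagicMock.
    json.dumps({"a": 1})
    json.dumps({"b": 2})
    assert patched.call_count == 2

# Outside the block the real function is restored automatically.
assert json.dumps({"a": 1}) == '{"a": 1}'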
+ assert mock_rpc.call_count == 1 + + client.get_data_scan_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_data_scan_job_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3161,6 +3756,52 @@ async def test_get_data_scan_job_empty_call_async(): assert args[0] == datascans.GetDataScanJobRequest() +@pytest.mark.asyncio +async def test_get_data_scan_job_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataScanServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_data_scan_job + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_data_scan_job + ] = mock_object + + request = {} + await client.get_data_scan_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_data_scan_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_data_scan_job_async( transport: str = "grpc_asyncio", request_type=datascans.GetDataScanJobRequest @@ -3411,6 +4052,9 @@ def test_list_data_scan_jobs_empty_call(): with mock.patch.object( type(client.transport.list_data_scan_jobs), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_data_scan_jobs() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3438,6 +4082,9 @@ def test_list_data_scan_jobs_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.list_data_scan_jobs), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_data_scan_jobs(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3448,6 +4095,45 @@ def test_list_data_scan_jobs_non_empty_request_with_auto_populated_field(): ) +def test_list_data_scan_jobs_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_data_scan_jobs in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_data_scan_jobs + ] = mock_rpc + request = {} + client.list_data_scan_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_data_scan_jobs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_data_scan_jobs_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3473,6 +4159,52 @@ async def test_list_data_scan_jobs_empty_call_async(): assert args[0] == datascans.ListDataScanJobsRequest() +@pytest.mark.asyncio +async def test_list_data_scan_jobs_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataScanServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_data_scan_jobs + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_data_scan_jobs + ] = mock_object + + request = {} + await client.list_data_scan_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_data_scan_jobs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_data_scan_jobs_async( transport: str = "grpc_asyncio", request_type=datascans.ListDataScanJobsRequest @@ -3910,6 +4642,9 @@ def test_generate_data_quality_rules_empty_call(): with mock.patch.object( type(client.transport.generate_data_quality_rules), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.generate_data_quality_rules() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3935,6 +4670,9 @@ def test_generate_data_quality_rules_non_empty_request_with_auto_populated_field with mock.patch.object( type(client.transport.generate_data_quality_rules), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.generate_data_quality_rules(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3943,6 +4681,46 @@ def test_generate_data_quality_rules_non_empty_request_with_auto_populated_field ) +def test_generate_data_quality_rules_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.generate_data_quality_rules + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.generate_data_quality_rules + ] = mock_rpc + request = {} + client.generate_data_quality_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.generate_data_quality_rules(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_generate_data_quality_rules_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3966,6 +4744,52 @@ async def test_generate_data_quality_rules_empty_call_async(): assert args[0] == datascans.GenerateDataQualityRulesRequest() +@pytest.mark.asyncio +async def test_generate_data_quality_rules_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataScanServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.generate_data_quality_rules + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.generate_data_quality_rules + ] = mock_object + + request = {} + await client.generate_data_quality_rules(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.generate_data_quality_rules(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_generate_data_quality_rules_async( transport: str = "grpc_asyncio", diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py index 69111b1f2997..a77702136507 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py @@ -1200,6 +1200,9 @@ def test_create_data_taxonomy_empty_call(): with mock.patch.object( type(client.transport.create_data_taxonomy), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_data_taxonomy() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1226,6 +1229,9 @@ def test_create_data_taxonomy_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.create_data_taxonomy), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_data_taxonomy(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1235,6 +1241,49 @@ def test_create_data_taxonomy_non_empty_request_with_auto_populated_field(): ) +def test_create_data_taxonomy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_data_taxonomy in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_data_taxonomy + ] = mock_rpc + request = {} + client.create_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_data_taxonomy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_data_taxonomy_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1258,6 +1307,56 @@ async def test_create_data_taxonomy_empty_call_async(): assert args[0] == gcd_data_taxonomy.CreateDataTaxonomyRequest() +@pytest.mark.asyncio +async def test_create_data_taxonomy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_data_taxonomy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_data_taxonomy + ] = mock_object + + request = {} + await client.create_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_data_taxonomy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_data_taxonomy_async( transport: str = "grpc_asyncio", @@ -1515,6 +1614,9 @@ def test_update_data_taxonomy_empty_call(): with mock.patch.object( type(client.transport.update_data_taxonomy), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_data_taxonomy() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1538,12 +1640,58 @@ def test_update_data_taxonomy_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.update_data_taxonomy), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.update_data_taxonomy(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == gcd_data_taxonomy.UpdateDataTaxonomyRequest() +def test_update_data_taxonomy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_data_taxonomy in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_data_taxonomy + ] = mock_rpc + request = {} + client.update_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_data_taxonomy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_update_data_taxonomy_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1567,6 +1715,56 @@ async def test_update_data_taxonomy_empty_call_async(): assert args[0] == gcd_data_taxonomy.UpdateDataTaxonomyRequest() +@pytest.mark.asyncio +async def test_update_data_taxonomy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_data_taxonomy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_data_taxonomy + ] = mock_object + + request = {} + await client.update_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_data_taxonomy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_data_taxonomy_async( transport: str = "grpc_asyncio", @@ -1814,6 +2012,9 @@ def test_delete_data_taxonomy_empty_call(): with mock.patch.object( type(client.transport.delete_data_taxonomy), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_data_taxonomy() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1840,6 +2041,9 @@ def test_delete_data_taxonomy_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.delete_data_taxonomy), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_data_taxonomy(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1849,6 +2053,49 @@ def test_delete_data_taxonomy_non_empty_request_with_auto_populated_field(): ) +def test_delete_data_taxonomy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_data_taxonomy in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_data_taxonomy + ] = mock_rpc + request = {} + client.delete_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. 
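The operation-returning methods (create/update/delete above) get an extra `wrapper_fn.reset_mock()` between the two calls: the first RPC is allowed to lazily build additional wrappers, for example around the operations client, and only subsequent calls are required to reuse them. A hedged sketch of that build-once-on-first-use shape, not the generated transport code:

import functools

class FakeLroTransport:
    builds = 0  # class-level counter, purely for this demonstration

    @functools.cached_property
    def operations_client(self):
        # Constructed lazily on first access, then cached on the instance.
        type(self).builds += 1
        return object()

transport = FakeLroTransport()
transport.operations_client  # first use builds it once
transport.operations_client  # later uses hit the cache; nothing new is built
assert FakeLroTransport.builds == 1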
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_data_taxonomy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_data_taxonomy_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1872,6 +2119,56 @@ async def test_delete_data_taxonomy_empty_call_async(): assert args[0] == data_taxonomy.DeleteDataTaxonomyRequest() +@pytest.mark.asyncio +async def test_delete_data_taxonomy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_data_taxonomy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_data_taxonomy + ] = mock_object + + request = {} + await client.delete_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_data_taxonomy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_data_taxonomy_async( transport: str = "grpc_asyncio", @@ -2114,6 +2411,9 @@ def test_list_data_taxonomies_empty_call(): with mock.patch.object( type(client.transport.list_data_taxonomies), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_data_taxonomies() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2142,6 +2442,9 @@ def test_list_data_taxonomies_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.list_data_taxonomies), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_data_taxonomies(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2153,6 +2456,45 @@ def test_list_data_taxonomies_non_empty_request_with_auto_populated_field(): ) +def test_list_data_taxonomies_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_data_taxonomies in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_data_taxonomies + ] = mock_rpc + request = {} + client.list_data_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_data_taxonomies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_data_taxonomies_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2179,6 +2521,52 @@ async def test_list_data_taxonomies_empty_call_async(): assert args[0] == data_taxonomy.ListDataTaxonomiesRequest() +@pytest.mark.asyncio +async def test_list_data_taxonomies_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_data_taxonomies + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_data_taxonomies + ] = mock_object + + request = {} + await client.list_data_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_data_taxonomies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_data_taxonomies_async( transport: str = "grpc_asyncio", @@ -2634,6 +3022,9 @@ def test_get_data_taxonomy_empty_call(): with mock.patch.object( type(client.transport.get_data_taxonomy), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.get_data_taxonomy() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2659,6 +3050,9 @@ def test_get_data_taxonomy_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.get_data_taxonomy), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_data_taxonomy(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2667,6 +3061,43 @@ def test_get_data_taxonomy_non_empty_request_with_auto_populated_field(): ) +def test_get_data_taxonomy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_data_taxonomy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_data_taxonomy + ] = mock_rpc + request = {} + client.get_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_data_taxonomy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_data_taxonomy_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2698,6 +3129,52 @@ async def test_get_data_taxonomy_empty_call_async(): assert args[0] == data_taxonomy.GetDataTaxonomyRequest() +@pytest.mark.asyncio +async def test_get_data_taxonomy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_data_taxonomy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_data_taxonomy + ] = mock_object + + request = {} + await client.get_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. 
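# A rough sketch of the caching pattern these tests exercise (illustrative
# only; names mirror the surrounding test): the transport builds every wrapped
# RPC once in _prep_wrapped_messages(), roughly
#
#     self._wrapped_methods = {
#         self.get_data_taxonomy: gapic_v1.method_async.wrap_method(
#             self.get_data_taxonomy,
#             client_info=client_info,
#         ),
#         # ... one entry per RPC ...
#     }
#
# and client methods then look the callable up in _wrapped_methods instead of
# re-wrapping on every request, which is why the assertions below count calls
# on the cached entry rather than on the patched wrapper factory.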
+ assert mock_object.call_count == 1 + + await client.get_data_taxonomy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_data_taxonomy_async( transport: str = "grpc_asyncio", request_type=data_taxonomy.GetDataTaxonomyRequest @@ -2949,6 +3426,9 @@ def test_create_data_attribute_binding_empty_call(): with mock.patch.object( type(client.transport.create_data_attribute_binding), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_data_attribute_binding() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2975,6 +3455,9 @@ def test_create_data_attribute_binding_non_empty_request_with_auto_populated_fie with mock.patch.object( type(client.transport.create_data_attribute_binding), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_data_attribute_binding(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2984,6 +3467,50 @@ def test_create_data_attribute_binding_non_empty_request_with_auto_populated_fie ) +def test_create_data_attribute_binding_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_data_attribute_binding + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_data_attribute_binding + ] = mock_rpc + request = {} + client.create_data_attribute_binding(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_data_attribute_binding(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_data_attribute_binding_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3007,6 +3534,56 @@ async def test_create_data_attribute_binding_empty_call_async(): assert args[0] == data_taxonomy.CreateDataAttributeBindingRequest() +@pytest.mark.asyncio +async def test_create_data_attribute_binding_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_data_attribute_binding + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_data_attribute_binding + ] = mock_object + + request = {} + await client.create_data_attribute_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_data_attribute_binding(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_data_attribute_binding_async( transport: str = "grpc_asyncio", @@ -3272,6 +3849,9 @@ def test_update_data_attribute_binding_empty_call(): with mock.patch.object( type(client.transport.update_data_attribute_binding), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_data_attribute_binding() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3295,12 +3875,59 @@ def test_update_data_attribute_binding_non_empty_request_with_auto_populated_fie with mock.patch.object( type(client.transport.update_data_attribute_binding), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.update_data_attribute_binding(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == data_taxonomy.UpdateDataAttributeBindingRequest() +def test_update_data_attribute_binding_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_data_attribute_binding + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_data_attribute_binding + ] = mock_rpc + request = {} + client.update_data_attribute_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_data_attribute_binding(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_update_data_attribute_binding_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3324,6 +3951,56 @@ async def test_update_data_attribute_binding_empty_call_async(): assert args[0] == data_taxonomy.UpdateDataAttributeBindingRequest() +@pytest.mark.asyncio +async def test_update_data_attribute_binding_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_data_attribute_binding + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_data_attribute_binding + ] = mock_object + + request = {} + await client.update_data_attribute_binding(request) + + # Establish that the underlying gRPC stub method was called. 
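# A note on the AwaitableMock helper used above: an object whose __await__
# returns iter([]) can be awaited and the await completes immediately with
# None, and the override also bumps await_count by hand. That lets the async
# client treat the stub like a real awaitable wrapped method without any gRPC
# machinery, while call_count on the cached _wrapped_methods entry records how
# often the client reached for it.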
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_data_attribute_binding(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_data_attribute_binding_async( transport: str = "grpc_asyncio", @@ -3579,6 +4256,9 @@ def test_delete_data_attribute_binding_empty_call(): with mock.patch.object( type(client.transport.delete_data_attribute_binding), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_data_attribute_binding() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3605,6 +4285,9 @@ def test_delete_data_attribute_binding_non_empty_request_with_auto_populated_fie with mock.patch.object( type(client.transport.delete_data_attribute_binding), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_data_attribute_binding(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3614,6 +4297,50 @@ def test_delete_data_attribute_binding_non_empty_request_with_auto_populated_fie ) +def test_delete_data_attribute_binding_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_data_attribute_binding + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_data_attribute_binding + ] = mock_rpc + request = {} + client.delete_data_attribute_binding(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_data_attribute_binding(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_data_attribute_binding_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3637,6 +4364,56 @@ async def test_delete_data_attribute_binding_empty_call_async(): assert args[0] == data_taxonomy.DeleteDataAttributeBindingRequest() +@pytest.mark.asyncio +async def test_delete_data_attribute_binding_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_data_attribute_binding + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_data_attribute_binding + ] = mock_object + + request = {} + await client.delete_data_attribute_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_data_attribute_binding(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_data_attribute_binding_async( transport: str = "grpc_asyncio", @@ -3879,6 +4656,9 @@ def test_list_data_attribute_bindings_empty_call(): with mock.patch.object( type(client.transport.list_data_attribute_bindings), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_data_attribute_bindings() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3907,6 +4687,9 @@ def test_list_data_attribute_bindings_non_empty_request_with_auto_populated_fiel with mock.patch.object( type(client.transport.list_data_attribute_bindings), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_data_attribute_bindings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3918,6 +4701,46 @@ def test_list_data_attribute_bindings_non_empty_request_with_auto_populated_fiel ) +def test_list_data_attribute_bindings_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_data_attribute_bindings + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_data_attribute_bindings + ] = mock_rpc + request = {} + client.list_data_attribute_bindings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_data_attribute_bindings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_data_attribute_bindings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3944,6 +4767,52 @@ async def test_list_data_attribute_bindings_empty_call_async(): assert args[0] == data_taxonomy.ListDataAttributeBindingsRequest() +@pytest.mark.asyncio +async def test_list_data_attribute_bindings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_data_attribute_bindings + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_data_attribute_bindings + ] = mock_object + + request = {} + await client.list_data_attribute_bindings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_data_attribute_bindings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_data_attribute_bindings_async( transport: str = "grpc_asyncio", @@ -4398,6 +5267,9 @@ def test_get_data_attribute_binding_empty_call(): with mock.patch.object( type(client.transport.get_data_attribute_binding), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_data_attribute_binding() call.assert_called() _, args, _ = call.mock_calls[0] @@ -4423,6 +5295,9 @@ def test_get_data_attribute_binding_non_empty_request_with_auto_populated_field( with mock.patch.object( type(client.transport.get_data_attribute_binding), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_data_attribute_binding(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -4431,6 +5306,46 @@ def test_get_data_attribute_binding_non_empty_request_with_auto_populated_field( ) +def test_get_data_attribute_binding_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_data_attribute_binding + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_data_attribute_binding + ] = mock_rpc + request = {} + client.get_data_attribute_binding(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_data_attribute_binding(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_data_attribute_binding_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -4461,6 +5376,52 @@ async def test_get_data_attribute_binding_empty_call_async(): assert args[0] == data_taxonomy.GetDataAttributeBindingRequest() +@pytest.mark.asyncio +async def test_get_data_attribute_binding_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_data_attribute_binding + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_data_attribute_binding + ] = mock_object + + request = {} + await client.get_data_attribute_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_data_attribute_binding(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_data_attribute_binding_async( transport: str = "grpc_asyncio", @@ -4711,6 +5672,9 @@ def test_create_data_attribute_empty_call(): with mock.patch.object( type(client.transport.create_data_attribute), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_data_attribute() call.assert_called() _, args, _ = call.mock_calls[0] @@ -4737,6 +5701,9 @@ def test_create_data_attribute_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.create_data_attribute), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.create_data_attribute(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -4746,6 +5713,50 @@ def test_create_data_attribute_non_empty_request_with_auto_populated_field(): ) +def test_create_data_attribute_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_data_attribute + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_data_attribute + ] = mock_rpc + request = {} + client.create_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_data_attribute(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_data_attribute_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -4769,6 +5780,56 @@ async def test_create_data_attribute_empty_call_async(): assert args[0] == data_taxonomy.CreateDataAttributeRequest() +@pytest.mark.asyncio +async def test_create_data_attribute_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_data_attribute + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_data_attribute + ] = mock_object + + request = {} + await client.create_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_data_attribute(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_data_attribute_async( transport: str = "grpc_asyncio", @@ -5026,6 +6087,9 @@ def test_update_data_attribute_empty_call(): with mock.patch.object( type(client.transport.update_data_attribute), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_data_attribute() call.assert_called() _, args, _ = call.mock_calls[0] @@ -5049,12 +6113,59 @@ def test_update_data_attribute_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.update_data_attribute), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_data_attribute(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == data_taxonomy.UpdateDataAttributeRequest() +def test_update_data_attribute_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_data_attribute + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_data_attribute + ] = mock_rpc + request = {} + client.update_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_data_attribute(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_update_data_attribute_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -5078,6 +6189,56 @@ async def test_update_data_attribute_empty_call_async(): assert args[0] == data_taxonomy.UpdateDataAttributeRequest() +@pytest.mark.asyncio +async def test_update_data_attribute_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_data_attribute + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_data_attribute + ] = mock_object + + request = {} + await client.update_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_data_attribute(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_data_attribute_async( transport: str = "grpc_asyncio", @@ -5325,6 +6486,9 @@ def test_delete_data_attribute_empty_call(): with mock.patch.object( type(client.transport.delete_data_attribute), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_data_attribute() call.assert_called() _, args, _ = call.mock_calls[0] @@ -5351,6 +6515,9 @@ def test_delete_data_attribute_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.delete_data_attribute), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.delete_data_attribute(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -5360,6 +6527,50 @@ def test_delete_data_attribute_non_empty_request_with_auto_populated_field(): ) +def test_delete_data_attribute_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_data_attribute + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_data_attribute + ] = mock_rpc + request = {} + client.delete_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_data_attribute(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_data_attribute_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -5383,6 +6594,56 @@ async def test_delete_data_attribute_empty_call_async(): assert args[0] == data_taxonomy.DeleteDataAttributeRequest() +@pytest.mark.asyncio +async def test_delete_data_attribute_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_data_attribute + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_data_attribute + ] = mock_object + + request = {} + await client.delete_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_data_attribute(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_data_attribute_async( transport: str = "grpc_asyncio", @@ -5625,6 +6886,9 @@ def test_list_data_attributes_empty_call(): with mock.patch.object( type(client.transport.list_data_attributes), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_data_attributes() call.assert_called() _, args, _ = call.mock_calls[0] @@ -5653,6 +6917,9 @@ def test_list_data_attributes_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.list_data_attributes), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_data_attributes(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -5664,6 +6931,45 @@ def test_list_data_attributes_non_empty_request_with_auto_populated_field(): ) +def test_list_data_attributes_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_data_attributes in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_data_attributes + ] = mock_rpc + request = {} + client.list_data_attributes(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_data_attributes(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_data_attributes_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -5690,6 +6996,52 @@ async def test_list_data_attributes_empty_call_async(): assert args[0] == data_taxonomy.ListDataAttributesRequest() +@pytest.mark.asyncio +async def test_list_data_attributes_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_data_attributes + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_data_attributes + ] = mock_object + + request = {} + await client.list_data_attributes(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_data_attributes(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_data_attributes_async( transport: str = "grpc_asyncio", @@ -6145,6 +7497,9 @@ def test_get_data_attribute_empty_call(): with mock.patch.object( type(client.transport.get_data_attribute), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_data_attribute() call.assert_called() _, args, _ = call.mock_calls[0] @@ -6170,6 +7525,9 @@ def test_get_data_attribute_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.get_data_attribute), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.get_data_attribute(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -6178,6 +7536,45 @@ def test_get_data_attribute_non_empty_request_with_auto_populated_field(): ) +def test_get_data_attribute_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_data_attribute in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_data_attribute + ] = mock_rpc + request = {} + client.get_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_data_attribute(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_data_attribute_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -6209,6 +7606,52 @@ async def test_get_data_attribute_empty_call_async(): assert args[0] == data_taxonomy.GetDataAttributeRequest() +@pytest.mark.asyncio +async def test_get_data_attribute_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_data_attribute + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_data_attribute + ] = mock_object + + request = {} + await client.get_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_data_attribute(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_data_attribute_async( transport: str = "grpc_asyncio", request_type=data_taxonomy.GetDataAttributeRequest diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_dataplex_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_dataplex_service.py index a66419263783..ac97d393aa18 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_dataplex_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_dataplex_service.py @@ -1168,6 +1168,9 @@ def test_create_lake_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_lake), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_lake() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1192,6 +1195,9 @@ def test_create_lake_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_lake), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_lake(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1201,6 +1207,45 @@ def test_create_lake_non_empty_request_with_auto_populated_field(): ) +def test_create_lake_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_lake in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_lake] = mock_rpc + request = {} + client.create_lake(request) + + # Establish that the underlying gRPC stub method was called. 
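# Reading the assertions that follow: mock_rpc.call_count shows the request was
# handled by the cached entry in _wrapped_methods, and resetting wrapper_fn
# before the second create_lake() call then asserting wrapper_fn.call_count == 0
# shows wrap_method was not invoked again, i.e. even long-running-operation
# methods reuse their cached wrapper after the first call.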
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_lake(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_lake_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1222,6 +1267,56 @@ async def test_create_lake_empty_call_async(): assert args[0] == service.CreateLakeRequest() +@pytest.mark.asyncio +async def test_create_lake_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_lake + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_lake + ] = mock_object + + request = {} + await client.create_lake(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_lake(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_lake_async( transport: str = "grpc_asyncio", request_type=service.CreateLakeRequest @@ -1464,6 +1559,9 @@ def test_update_lake_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_lake), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_lake() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1485,12 +1583,54 @@ def test_update_lake_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_lake), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.update_lake(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == service.UpdateLakeRequest() +def test_update_lake_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_lake in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_lake] = mock_rpc + request = {} + client.update_lake(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_lake(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_update_lake_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1512,6 +1652,56 @@ async def test_update_lake_empty_call_async(): assert args[0] == service.UpdateLakeRequest() +@pytest.mark.asyncio +async def test_update_lake_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_lake + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_lake + ] = mock_object + + request = {} + await client.update_lake(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_lake(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_lake_async( transport: str = "grpc_asyncio", request_type=service.UpdateLakeRequest @@ -1744,6 +1934,9 @@ def test_delete_lake_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
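# Throughout these empty-call tests the mocked call's return value gets a real
# string assigned to .name before the client is invoked; per the in-line
# comments this is because downstream operation handling
# (operation_request.operation in the compute clients) expects a string rather
# than a nested MagicMock attribute.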
with mock.patch.object(type(client.transport.delete_lake), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_lake() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1767,6 +1960,9 @@ def test_delete_lake_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_lake), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_lake(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1775,6 +1971,45 @@ def test_delete_lake_non_empty_request_with_auto_populated_field(): ) +def test_delete_lake_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_lake in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_lake] = mock_rpc + request = {} + client.delete_lake(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_lake(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_lake_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1796,6 +2031,56 @@ async def test_delete_lake_empty_call_async(): assert args[0] == service.DeleteLakeRequest() +@pytest.mark.asyncio +async def test_delete_lake_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_lake + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_lake + ] = mock_object + + request = {} + await client.delete_lake(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_lake(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_lake_async( transport: str = "grpc_asyncio", request_type=service.DeleteLakeRequest @@ -2023,6 +2308,9 @@ def test_list_lakes_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_lakes), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_lakes() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2049,6 +2337,9 @@ def test_list_lakes_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_lakes), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_lakes(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2060,6 +2351,41 @@ def test_list_lakes_non_empty_request_with_auto_populated_field(): ) +def test_list_lakes_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_lakes in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_lakes] = mock_rpc + request = {} + client.list_lakes(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_lakes(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_lakes_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2084,6 +2410,50 @@ async def test_list_lakes_empty_call_async(): assert args[0] == service.ListLakesRequest() +@pytest.mark.asyncio +async def test_list_lakes_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_lakes + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_lakes + ] = mock_object + + request = {} + await client.list_lakes(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_lakes(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_lakes_async( transport: str = "grpc_asyncio", request_type=service.ListLakesRequest @@ -2514,6 +2884,9 @@ def test_get_lake_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_lake), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_lake() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2537,6 +2910,9 @@ def test_get_lake_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_lake), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_lake(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2545,6 +2921,41 @@ def test_get_lake_non_empty_request_with_auto_populated_field(): ) +def test_get_lake_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_lake in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.get_lake] = mock_rpc + request = {} + client.get_lake(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_lake(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_lake_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2573,6 +2984,50 @@ async def test_get_lake_empty_call_async(): assert args[0] == service.GetLakeRequest() +@pytest.mark.asyncio +async def test_get_lake_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_lake + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_lake + ] = mock_object + + request = {} + await client.get_lake(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_lake(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_lake_async( transport: str = "grpc_asyncio", request_type=service.GetLakeRequest @@ -2811,6 +3266,9 @@ def test_list_lake_actions_empty_call(): with mock.patch.object( type(client.transport.list_lake_actions), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_lake_actions() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2837,6 +3295,9 @@ def test_list_lake_actions_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.list_lake_actions), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_lake_actions(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2846,6 +3307,43 @@ def test_list_lake_actions_non_empty_request_with_auto_populated_field(): ) +def test_list_lake_actions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_lake_actions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_lake_actions + ] = mock_rpc + request = {} + client.list_lake_actions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_lake_actions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_lake_actions_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2871,6 +3369,52 @@ async def test_list_lake_actions_empty_call_async(): assert args[0] == service.ListLakeActionsRequest() +@pytest.mark.asyncio +async def test_list_lake_actions_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_lake_actions + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_lake_actions + ] = mock_object + + request = {} + await client.list_lake_actions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_lake_actions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_lake_actions_async( transport: str = "grpc_asyncio", request_type=service.ListLakeActionsRequest @@ -3304,6 +3848,9 @@ def test_create_zone_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_zone), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.create_zone() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3328,6 +3875,9 @@ def test_create_zone_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_zone), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_zone(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3337,6 +3887,45 @@ def test_create_zone_non_empty_request_with_auto_populated_field(): ) +def test_create_zone_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_zone in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_zone] = mock_rpc + request = {} + client.create_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_zone_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3359,17 +3948,67 @@ async def test_create_zone_empty_call_async(): @pytest.mark.asyncio -async def test_create_zone_async( - transport: str = "grpc_asyncio", request_type=service.CreateZoneRequest +async def test_create_zone_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - client = DataplexServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_zone + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_zone + ] = mock_object + + request = {} + await client.create_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_zone_async( + transport: str = "grpc_asyncio", request_type=service.CreateZoneRequest +): + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_zone), "__call__") as call: @@ -3600,6 +4239,9 @@ def test_update_zone_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_zone), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_zone() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3621,12 +4263,54 @@ def test_update_zone_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_zone), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_zone(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == service.UpdateZoneRequest() +def test_update_zone_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_zone in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_zone] = mock_rpc + request = {} + client.update_zone(request) + + # Establish that the underlying gRPC stub method was called. 
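+ # update_zone is an operation method, so wrapper_fn is reset below and
+ # re-checked to prove the LRO wrapper is built once and then reused.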
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_update_zone_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3648,6 +4332,56 @@ async def test_update_zone_empty_call_async(): assert args[0] == service.UpdateZoneRequest() +@pytest.mark.asyncio +async def test_update_zone_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_zone + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_zone + ] = mock_object + + request = {} + await client.update_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_zone_async( transport: str = "grpc_asyncio", request_type=service.UpdateZoneRequest @@ -3880,6 +4614,9 @@ def test_delete_zone_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_zone() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3903,6 +4640,9 @@ def test_delete_zone_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_zone), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.delete_zone(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3911,6 +4651,45 @@ def test_delete_zone_non_empty_request_with_auto_populated_field(): ) +def test_delete_zone_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_zone in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_zone] = mock_rpc + request = {} + client.delete_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_zone_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3932,6 +4711,56 @@ async def test_delete_zone_empty_call_async(): assert args[0] == service.DeleteZoneRequest() +@pytest.mark.asyncio +async def test_delete_zone_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_zone + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_zone + ] = mock_object + + request = {} + await client.delete_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_zone_async( transport: str = "grpc_asyncio", request_type=service.DeleteZoneRequest @@ -4157,6 +4986,9 @@ def test_list_zones_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_zones), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_zones() call.assert_called() _, args, _ = call.mock_calls[0] @@ -4183,6 +5015,9 @@ def test_list_zones_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_zones), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_zones(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -4194,6 +5029,41 @@ def test_list_zones_non_empty_request_with_auto_populated_field(): ) +def test_list_zones_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_zones in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_zones] = mock_rpc + request = {} + client.list_zones(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_zones(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_zones_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -4217,6 +5087,50 @@ async def test_list_zones_empty_call_async(): assert args[0] == service.ListZonesRequest() +@pytest.mark.asyncio +async def test_list_zones_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_zones + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_zones + ] = mock_object + + request = {} + await client.list_zones(request) + + # Establish that the underlying gRPC stub method was called. 
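+ # The async client delegates to an inner sync client, which is why the
+ # cache is reached through client._client._transport in this test.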
+ assert mock_object.call_count == 1 + + await client.list_zones(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_zones_async( transport: str = "grpc_asyncio", request_type=service.ListZonesRequest @@ -4645,6 +5559,9 @@ def test_get_zone_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_zone), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_zone() call.assert_called() _, args, _ = call.mock_calls[0] @@ -4668,6 +5585,9 @@ def test_get_zone_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_zone), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_zone(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -4676,6 +5596,41 @@ def test_get_zone_non_empty_request_with_auto_populated_field(): ) +def test_get_zone_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_zone in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_zone] = mock_rpc + request = {} + client.get_zone(request) + + # Establish that the underlying gRPC stub method was called. 
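+ # The empty dict request is coerced to a GetZoneRequest inside the client,
+ # so a single stub invocation is all that should be recorded here.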
+ assert mock_rpc.call_count == 1 + + client.get_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_zone_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -4704,6 +5659,50 @@ async def test_get_zone_empty_call_async(): assert args[0] == service.GetZoneRequest() +@pytest.mark.asyncio +async def test_get_zone_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_zone + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_zone + ] = mock_object + + request = {} + await client.get_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_zone_async( transport: str = "grpc_asyncio", request_type=service.GetZoneRequest @@ -4942,6 +5941,9 @@ def test_list_zone_actions_empty_call(): with mock.patch.object( type(client.transport.list_zone_actions), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_zone_actions() call.assert_called() _, args, _ = call.mock_calls[0] @@ -4968,6 +5970,9 @@ def test_list_zone_actions_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.list_zone_actions), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_zone_actions(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -4977,6 +5982,43 @@ def test_list_zone_actions_non_empty_request_with_auto_populated_field(): ) +def test_list_zone_actions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_zone_actions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.list_zone_actions + ] = mock_rpc + request = {} + client.list_zone_actions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_zone_actions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_zone_actions_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -5002,6 +6044,52 @@ async def test_list_zone_actions_empty_call_async(): assert args[0] == service.ListZoneActionsRequest() +@pytest.mark.asyncio +async def test_list_zone_actions_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_zone_actions + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_zone_actions + ] = mock_object + + request = {} + await client.list_zone_actions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_zone_actions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_zone_actions_async( transport: str = "grpc_asyncio", request_type=service.ListZoneActionsRequest @@ -5435,6 +6523,9 @@ def test_create_asset_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_asset), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_asset() call.assert_called() _, args, _ = call.mock_calls[0] @@ -5459,6 +6550,9 @@ def test_create_asset_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_asset), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.create_asset(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -5468,6 +6562,45 @@ def test_create_asset_non_empty_request_with_auto_populated_field(): ) +def test_create_asset_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_asset in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_asset] = mock_rpc + request = {} + client.create_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_asset(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_asset_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -5489,6 +6622,56 @@ async def test_create_asset_empty_call_async(): assert args[0] == service.CreateAssetRequest() +@pytest.mark.asyncio +async def test_create_asset_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_asset + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_asset + ] = mock_object + + request = {} + await client.create_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_asset(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_asset_async( transport: str = "grpc_asyncio", request_type=service.CreateAssetRequest @@ -5731,6 +6914,9 @@ def test_update_asset_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.update_asset), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_asset() call.assert_called() _, args, _ = call.mock_calls[0] @@ -5752,13 +6938,55 @@ def test_update_asset_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_asset), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_asset(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == service.UpdateAssetRequest() -@pytest.mark.asyncio +def test_update_asset_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_asset in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_asset] = mock_rpc + request = {} + client.update_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_asset(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio async def test_update_asset_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -5779,6 +7007,56 @@ async def test_update_asset_empty_call_async(): assert args[0] == service.UpdateAssetRequest() +@pytest.mark.asyncio +async def test_update_asset_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_asset + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_asset + ] = mock_object + + request = {} + await client.update_asset(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_asset(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_asset_async( transport: str = "grpc_asyncio", request_type=service.UpdateAssetRequest @@ -6011,6 +7289,9 @@ def test_delete_asset_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_asset), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_asset() call.assert_called() _, args, _ = call.mock_calls[0] @@ -6034,6 +7315,9 @@ def test_delete_asset_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_asset), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_asset(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -6042,6 +7326,45 @@ def test_delete_asset_non_empty_request_with_auto_populated_field(): ) +def test_delete_asset_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_asset in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_asset] = mock_rpc + request = {} + client.delete_asset(request) + + # Establish that the underlying gRPC stub method was called. 
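+ # return_value.name is forced to a real string above because a bare Mock
+ # attribute would not be one; the assertions below only count calls.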
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_asset(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_asset_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -6063,6 +7386,56 @@ async def test_delete_asset_empty_call_async(): assert args[0] == service.DeleteAssetRequest() +@pytest.mark.asyncio +async def test_delete_asset_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_asset + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_asset + ] = mock_object + + request = {} + await client.delete_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_asset(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_asset_async( transport: str = "grpc_asyncio", request_type=service.DeleteAssetRequest @@ -6288,6 +7661,9 @@ def test_list_assets_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_assets), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_assets() call.assert_called() _, args, _ = call.mock_calls[0] @@ -6314,6 +7690,9 @@ def test_list_assets_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_assets), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_assets(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -6325,6 +7704,41 @@ def test_list_assets_non_empty_request_with_auto_populated_field(): ) +def test_list_assets_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_assets in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_assets] = mock_rpc + request = {} + client.list_assets(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_assets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_assets_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -6348,6 +7762,52 @@ async def test_list_assets_empty_call_async(): assert args[0] == service.ListAssetsRequest() +@pytest.mark.asyncio +async def test_list_assets_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_assets + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_assets + ] = mock_object + + request = {} + await client.list_assets(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_assets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_assets_async( transport: str = "grpc_asyncio", request_type=service.ListAssetsRequest @@ -6774,6 +8234,9 @@ def test_get_asset_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_asset), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.get_asset() call.assert_called() _, args, _ = call.mock_calls[0] @@ -6797,6 +8260,9 @@ def test_get_asset_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_asset), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_asset(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -6805,6 +8271,41 @@ def test_get_asset_non_empty_request_with_auto_populated_field(): ) +def test_get_asset_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_asset in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_asset] = mock_rpc + request = {} + client.get_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_asset(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_asset_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -6832,6 +8333,50 @@ async def test_get_asset_empty_call_async(): assert args[0] == service.GetAssetRequest() +@pytest.mark.asyncio +async def test_get_asset_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_asset + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_asset + ] = mock_object + + request = {} + await client.get_asset(request) + + # Establish that the underlying gRPC stub method was called. 
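+ # AwaitableMock.__await__ returns an empty iterator, so awaiting the mock
+ # finishes immediately; only call_count matters for the cache check.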
+ assert mock_object.call_count == 1 + + await client.get_asset(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_asset_async( transport: str = "grpc_asyncio", request_type=service.GetAssetRequest @@ -7068,6 +8613,9 @@ def test_list_asset_actions_empty_call(): with mock.patch.object( type(client.transport.list_asset_actions), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_asset_actions() call.assert_called() _, args, _ = call.mock_calls[0] @@ -7094,6 +8642,9 @@ def test_list_asset_actions_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.list_asset_actions), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_asset_actions(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -7103,6 +8654,45 @@ def test_list_asset_actions_non_empty_request_with_auto_populated_field(): ) +def test_list_asset_actions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_asset_actions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_asset_actions + ] = mock_rpc + request = {} + client.list_asset_actions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_asset_actions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_asset_actions_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -7128,6 +8718,52 @@ async def test_list_asset_actions_empty_call_async(): assert args[0] == service.ListAssetActionsRequest() +@pytest.mark.asyncio +async def test_list_asset_actions_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_asset_actions + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_asset_actions + ] = mock_object + + request = {} + await client.list_asset_actions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_asset_actions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_asset_actions_async( transport: str = "grpc_asyncio", request_type=service.ListAssetActionsRequest @@ -7561,6 +9197,9 @@ def test_create_task_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_task), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_task() call.assert_called() _, args, _ = call.mock_calls[0] @@ -7585,6 +9224,9 @@ def test_create_task_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_task), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.create_task(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -7594,6 +9236,45 @@ def test_create_task_non_empty_request_with_auto_populated_field(): ) +def test_create_task_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_task in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_task] = mock_rpc + request = {} + client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_task_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -7615,6 +9296,56 @@ async def test_create_task_empty_call_async(): assert args[0] == service.CreateTaskRequest() +@pytest.mark.asyncio +async def test_create_task_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_task + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_task + ] = mock_object + + request = {} + await client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_task_async( transport: str = "grpc_asyncio", request_type=service.CreateTaskRequest @@ -7857,6 +9588,9 @@ def test_update_task_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.update_task), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_task() call.assert_called() _, args, _ = call.mock_calls[0] @@ -7878,12 +9612,54 @@ def test_update_task_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_task), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_task(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == service.UpdateTaskRequest() +def test_update_task_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_task in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_task] = mock_rpc + request = {} + client.update_task(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_update_task_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -7906,15 +9682,65 @@ async def test_update_task_empty_call_async(): @pytest.mark.asyncio -async def test_update_task_async( - transport: str = "grpc_asyncio", request_type=service.UpdateTaskRequest +async def test_update_task_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - client = DataplexServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Everything is optional in proto3 as far as the runtime is concerned, + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_task + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_task + ] = mock_object + + 
request = {} + await client.update_task(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_task_async( + transport: str = "grpc_asyncio", request_type=service.UpdateTaskRequest +): + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() @@ -8137,6 +9963,9 @@ def test_delete_task_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_task), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_task() call.assert_called() _, args, _ = call.mock_calls[0] @@ -8160,6 +9989,9 @@ def test_delete_task_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_task), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_task(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -8168,6 +10000,45 @@ def test_delete_task_non_empty_request_with_auto_populated_field(): ) +def test_delete_task_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_task in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_task] = mock_rpc + request = {} + client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_task_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -8189,6 +10060,56 @@ async def test_delete_task_empty_call_async(): assert args[0] == service.DeleteTaskRequest() +@pytest.mark.asyncio +async def test_delete_task_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_task + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_task + ] = mock_object + + request = {} + await client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_task_async( transport: str = "grpc_asyncio", request_type=service.DeleteTaskRequest @@ -8416,6 +10337,9 @@ def test_list_tasks_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_tasks() call.assert_called() _, args, _ = call.mock_calls[0] @@ -8442,6 +10366,9 @@ def test_list_tasks_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_tasks(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -8453,6 +10380,41 @@ def test_list_tasks_non_empty_request_with_auto_populated_field(): ) +def test_list_tasks_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_tasks in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_tasks] = mock_rpc + request = {} + client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_tasks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_tasks_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -8477,6 +10439,50 @@ async def test_list_tasks_empty_call_async(): assert args[0] == service.ListTasksRequest() +@pytest.mark.asyncio +async def test_list_tasks_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_tasks + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_tasks + ] = mock_object + + request = {} + await client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_tasks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_tasks_async( transport: str = "grpc_asyncio", request_type=service.ListTasksRequest @@ -8905,6 +10911,9 @@ def test_get_task_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_task), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.get_task() call.assert_called() _, args, _ = call.mock_calls[0] @@ -8928,6 +10937,9 @@ def test_get_task_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_task), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_task(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -8936,6 +10948,41 @@ def test_get_task_non_empty_request_with_auto_populated_field(): ) +def test_get_task_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_task in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_task] = mock_rpc + request = {} + client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_task_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -8963,6 +11010,50 @@ async def test_get_task_empty_call_async(): assert args[0] == service.GetTaskRequest() +@pytest.mark.asyncio +async def test_get_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_task + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_task + ] = mock_object + + request = {} + await client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_task_async( transport: str = "grpc_asyncio", request_type=service.GetTaskRequest @@ -9195,6 +11286,9 @@ def test_list_jobs_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_jobs() call.assert_called() _, args, _ = call.mock_calls[0] @@ -9219,6 +11313,9 @@ def test_list_jobs_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_jobs(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -9228,6 +11325,41 @@ def test_list_jobs_non_empty_request_with_auto_populated_field(): ) +def test_list_jobs_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_jobs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_jobs] = mock_rpc + request = {} + client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_jobs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_jobs_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -9251,6 +11383,50 @@ async def test_list_jobs_empty_call_async(): assert args[0] == service.ListJobsRequest() +@pytest.mark.asyncio +async def test_list_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_jobs + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_jobs + ] = mock_object + + request = {} + await client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_jobs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_jobs_async( transport: str = "grpc_asyncio", request_type=service.ListJobsRequest @@ -9666,6 +11842,9 @@ def test_run_task_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.run_task), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.run_task() call.assert_called() _, args, _ = call.mock_calls[0] @@ -9689,6 +11868,9 @@ def test_run_task_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.run_task), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.run_task(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -9697,6 +11879,41 @@ def test_run_task_non_empty_request_with_auto_populated_field(): ) +def test_run_task_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.run_task in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.run_task] = mock_rpc + request = {} + client.run_task(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.run_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_run_task_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -9718,6 +11935,50 @@ async def test_run_task_empty_call_async(): assert args[0] == service.RunTaskRequest() +@pytest.mark.asyncio +async def test_run_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.run_task + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.run_task + ] = mock_object + + request = {} + await client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.run_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_run_task_async( transport: str = "grpc_asyncio", request_type=service.RunTaskRequest @@ -9957,6 +12218,9 @@ def test_get_job_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_job), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_job() call.assert_called() _, args, _ = call.mock_calls[0] @@ -9980,6 +12244,9 @@ def test_get_job_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_job), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_job(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -9988,6 +12255,41 @@ def test_get_job_non_empty_request_with_auto_populated_field(): ) +def test_get_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.get_job] = mock_rpc + request = {} + client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_job_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -10018,6 +12320,50 @@ async def test_get_job_empty_call_async(): assert args[0] == service.GetJobRequest() +@pytest.mark.asyncio +async def test_get_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_job + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_job + ] = mock_object + + request = {} + await client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_job_async( transport: str = "grpc_asyncio", request_type=service.GetJobRequest @@ -10253,6 +12599,9 @@ def test_cancel_job_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.cancel_job() call.assert_called() _, args, _ = call.mock_calls[0] @@ -10276,6 +12625,9 @@ def test_cancel_job_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.cancel_job(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -10284,16 +12636,51 @@ def test_cancel_job_non_empty_request_with_auto_populated_field(): ) -@pytest.mark.asyncio -async def test_cancel_job_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataplexServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
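# A minimal sketch, assuming nothing beyond plain Python, of the caching pattern the
# *_use_cached_wrapped_rpc tests in this diff assert: the transport wraps each stub
# exactly once at construction (the _prep_wrapped_messages step), stores the wrapper
# in _wrapped_methods keyed by the bare stub, and client calls reuse that cached
# wrapper instead of invoking wrap_method again. SketchTransport, SketchClient and
# fake_wrap_method below are hypothetical stand-ins, not the real google.api_core or
# Dataplex transport API.
def make_sketch():
    wrap_calls = {"count": 0}

    def fake_wrap_method(func, default_timeout=None):
        # Stand-in for gapic_v1.method.wrap_method: count invocations and delegate.
        wrap_calls["count"] += 1

        def wrapped(request):
            # A real wrapper would add retry, timeout and error handling here.
            return func(request)

        return wrapped

    class SketchTransport:
        def __init__(self):
            # Bare "stubs" (plain callables standing in for gRPC stub methods).
            self.cancel_job = lambda request: ("CancelJob", request)
            self.list_jobs = lambda request: ("ListJobs", request)
            # _prep_wrapped_messages equivalent: wrap once, cache by the bare stub.
            self._wrapped_methods = {
                self.cancel_job: fake_wrap_method(self.cancel_job, 60.0),
                self.list_jobs: fake_wrap_method(self.list_jobs, 60.0),
            }

    class SketchClient:
        def __init__(self):
            self._transport = SketchTransport()

        def cancel_job(self, request):
            # Client methods look up the cached wrapper; they never re-wrap per call.
            return self._transport._wrapped_methods[self._transport.cancel_job](request)

    return SketchClient(), wrap_calls


sketch_client, wrap_calls = make_sketch()
assert wrap_calls["count"] == 2  # wrapped once per method at construction
sketch_client.cancel_job({})
sketch_client.cancel_job({})
assert wrap_calls["count"] == 2  # repeated calls did not create new wrappers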
+def test_cancel_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.cancel_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.cancel_job] = mock_rpc + request = {} + client.cancel_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.cancel_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_cancel_job_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -10303,6 +12690,50 @@ async def test_cancel_job_empty_call_async(): assert args[0] == service.CancelJobRequest() +@pytest.mark.asyncio +async def test_cancel_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.cancel_job + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.cancel_job + ] = mock_object + + request = {} + await client.cancel_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.cancel_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_cancel_job_async( transport: str = "grpc_asyncio", request_type=service.CancelJobRequest @@ -10523,6 +12954,9 @@ def test_create_environment_empty_call(): with mock.patch.object( type(client.transport.create_environment), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_environment() call.assert_called() _, args, _ = call.mock_calls[0] @@ -10549,6 +12983,9 @@ def test_create_environment_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.create_environment), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_environment(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -10558,6 +12995,49 @@ def test_create_environment_non_empty_request_with_auto_populated_field(): ) +def test_create_environment_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_environment in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_environment + ] = mock_rpc + request = {} + client.create_environment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_environment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_environment_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -10581,6 +13061,56 @@ async def test_create_environment_empty_call_async(): assert args[0] == service.CreateEnvironmentRequest() +@pytest.mark.asyncio +async def test_create_environment_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_environment + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_environment + ] = mock_object + + request = {} + await client.create_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_environment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_environment_async( transport: str = "grpc_asyncio", request_type=service.CreateEnvironmentRequest @@ -10837,6 +13367,9 @@ def test_update_environment_empty_call(): with mock.patch.object( type(client.transport.update_environment), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_environment() call.assert_called() _, args, _ = call.mock_calls[0] @@ -10860,12 +13393,58 @@ def test_update_environment_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.update_environment), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.update_environment(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == service.UpdateEnvironmentRequest() +def test_update_environment_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_environment in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_environment + ] = mock_rpc + request = {} + client.update_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_environment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_update_environment_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -10889,6 +13468,56 @@ async def test_update_environment_empty_call_async(): assert args[0] == service.UpdateEnvironmentRequest() +@pytest.mark.asyncio +async def test_update_environment_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_environment + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_environment + ] = mock_object + + request = {} + await client.update_environment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_environment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_environment_async( transport: str = "grpc_asyncio", request_type=service.UpdateEnvironmentRequest @@ -11135,6 +13764,9 @@ def test_delete_environment_empty_call(): with mock.patch.object( type(client.transport.delete_environment), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_environment() call.assert_called() _, args, _ = call.mock_calls[0] @@ -11160,6 +13792,9 @@ def test_delete_environment_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.delete_environment), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_environment(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -11168,6 +13803,49 @@ def test_delete_environment_non_empty_request_with_auto_populated_field(): ) +def test_delete_environment_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_environment in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_environment + ] = mock_rpc + request = {} + client.delete_environment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_environment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_environment_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -11191,6 +13869,56 @@ async def test_delete_environment_empty_call_async(): assert args[0] == service.DeleteEnvironmentRequest() +@pytest.mark.asyncio +async def test_delete_environment_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_environment + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_environment + ] = mock_object + + request = {} + await client.delete_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_environment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_environment_async( transport: str = "grpc_asyncio", request_type=service.DeleteEnvironmentRequest @@ -11430,6 +14158,9 @@ def test_list_environments_empty_call(): with mock.patch.object( type(client.transport.list_environments), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_environments() call.assert_called() _, args, _ = call.mock_calls[0] @@ -11458,6 +14189,9 @@ def test_list_environments_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.list_environments), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_environments(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -11469,6 +14203,43 @@ def test_list_environments_non_empty_request_with_auto_populated_field(): ) +def test_list_environments_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_environments in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_environments + ] = mock_rpc + request = {} + client.list_environments(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_environments(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_environments_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -11494,6 +14265,52 @@ async def test_list_environments_empty_call_async(): assert args[0] == service.ListEnvironmentsRequest() +@pytest.mark.asyncio +async def test_list_environments_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_environments + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_environments + ] = mock_object + + request = {} + await client.list_environments(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_environments(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_environments_async( transport: str = "grpc_asyncio", request_type=service.ListEnvironmentsRequest @@ -11938,6 +14755,9 @@ def test_get_environment_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_environment), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.get_environment() call.assert_called() _, args, _ = call.mock_calls[0] @@ -11961,6 +14781,9 @@ def test_get_environment_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_environment), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_environment(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -11969,6 +14792,41 @@ def test_get_environment_non_empty_request_with_auto_populated_field(): ) +def test_get_environment_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_environment in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_environment] = mock_rpc + request = {} + client.get_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_environment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_environment_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -11996,6 +14854,52 @@ async def test_get_environment_empty_call_async(): assert args[0] == service.GetEnvironmentRequest() +@pytest.mark.asyncio +async def test_get_environment_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_environment + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_environment + ] = mock_object + + request = {} + await client.get_environment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_environment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_environment_async( transport: str = "grpc_asyncio", request_type=service.GetEnvironmentRequest @@ -12228,6 +15132,9 @@ def test_list_sessions_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_sessions), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_sessions() call.assert_called() _, args, _ = call.mock_calls[0] @@ -12253,6 +15160,9 @@ def test_list_sessions_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_sessions), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_sessions(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -12263,6 +15173,41 @@ def test_list_sessions_non_empty_request_with_auto_populated_field(): ) +def test_list_sessions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_sessions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_sessions] = mock_rpc + request = {} + client.list_sessions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_sessions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_sessions_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -12286,6 +15231,52 @@ async def test_list_sessions_empty_call_async(): assert args[0] == service.ListSessionsRequest() +@pytest.mark.asyncio +async def test_list_sessions_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_sessions + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_sessions + ] = mock_object + + request = {} + await client.list_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_sessions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_sessions_async( transport: str = "grpc_asyncio", request_type=service.ListSessionsRequest diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_metadata_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_metadata_service.py index c926c6857278..a56f6142d742 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_metadata_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_metadata_service.py @@ -1181,6 +1181,9 @@ def test_create_entity_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_entity), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_entity() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1204,6 +1207,9 @@ def test_create_entity_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_entity), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.create_entity(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1212,6 +1218,41 @@ def test_create_entity_non_empty_request_with_auto_populated_field(): ) +def test_create_entity_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_entity in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_entity] = mock_rpc + request = {} + client.create_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_entity(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_entity_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1246,6 +1287,52 @@ async def test_create_entity_empty_call_async(): assert args[0] == metadata_.CreateEntityRequest() +@pytest.mark.asyncio +async def test_create_entity_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetadataServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_entity + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_entity + ] = mock_object + + request = {} + await client.create_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.create_entity(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_entity_async( transport: str = "grpc_asyncio", request_type=metadata_.CreateEntityRequest @@ -1524,6 +1611,9 @@ def test_update_entity_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_entity), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.update_entity() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1545,12 +1635,50 @@ def test_update_entity_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_entity), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_entity(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == metadata_.UpdateEntityRequest() +def test_update_entity_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_entity in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_entity] = mock_rpc + request = {} + client.update_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_entity(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_update_entity_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1585,6 +1713,52 @@ async def test_update_entity_empty_call_async(): assert args[0] == metadata_.UpdateEntityRequest() +@pytest.mark.asyncio +async def test_update_entity_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetadataServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_entity + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_entity + ] = mock_object + + request = {} + await client.update_entity(request) + + # Establish that the underlying gRPC stub method was called. 
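The async variants above reach through client._client._transport because the async surface delegates to an inner client object whose transport owns the _wrapped_methods cache. A hedged sketch of that layering (class and attribute names here are illustrative only, not the generated implementation):

class _SketchAsyncClient:
    def __init__(self, inner_client):
        # The async wrapper keeps a sync-style inner client; its grpc_asyncio-backed
        # transport is where the cached wrapped methods live.
        self._client = inner_client

    async def update_entity(self, request):
        transport = self._client._transport
        rpc = transport._wrapped_methods[transport.update_entity]  # cached wrapper
        return await rpc(request)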
+ assert mock_object.call_count == 1 + + await client.update_entity(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_entity_async( transport: str = "grpc_asyncio", request_type=metadata_.UpdateEntityRequest @@ -1748,6 +1922,9 @@ def test_delete_entity_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_entity), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_entity() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1772,6 +1949,9 @@ def test_delete_entity_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_entity), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_entity(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1781,6 +1961,41 @@ def test_delete_entity_non_empty_request_with_auto_populated_field(): ) +def test_delete_entity_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_entity in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_entity] = mock_rpc + request = {} + client.delete_entity(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_entity(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_entity_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1800,6 +2015,52 @@ async def test_delete_entity_empty_call_async(): assert args[0] == metadata_.DeleteEntityRequest() +@pytest.mark.asyncio +async def test_delete_entity_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetadataServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_entity + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_entity + ] = mock_object + + request = {} + await client.delete_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.delete_entity(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_entity_async( transport: str = "grpc_asyncio", request_type=metadata_.DeleteEntityRequest @@ -2041,6 +2302,9 @@ def test_get_entity_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_entity), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_entity() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2064,6 +2328,9 @@ def test_get_entity_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_entity), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.get_entity(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2072,6 +2339,41 @@ def test_get_entity_non_empty_request_with_auto_populated_field(): ) +def test_get_entity_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_entity in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_entity] = mock_rpc + request = {} + client.get_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_entity(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_entity_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2106,6 +2408,50 @@ async def test_get_entity_empty_call_async(): assert args[0] == metadata_.GetEntityRequest() +@pytest.mark.asyncio +async def test_get_entity_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetadataServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_entity + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_entity + ] = mock_object + + request = {} + await client.get_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_entity(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_entity_async( transport: str = "grpc_asyncio", request_type=metadata_.GetEntityRequest @@ -2352,6 +2698,9 @@ def test_list_entities_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_entities), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_entities() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2377,6 +2726,9 @@ def test_list_entities_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_entities), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_entities(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2387,6 +2739,41 @@ def test_list_entities_non_empty_request_with_auto_populated_field(): ) +def test_list_entities_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_entities in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_entities] = mock_rpc + request = {} + client.list_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_entities(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_entities_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2410,6 +2797,52 @@ async def test_list_entities_empty_call_async(): assert args[0] == metadata_.ListEntitiesRequest() +@pytest.mark.asyncio +async def test_list_entities_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetadataServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_entities + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_entities + ] = mock_object + + request = {} + await client.list_entities(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_entities(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_entities_async( transport: str = "grpc_asyncio", request_type=metadata_.ListEntitiesRequest @@ -2834,6 +3267,9 @@ def test_create_partition_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_partition), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_partition() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2857,6 +3293,9 @@ def test_create_partition_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_partition), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_partition(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2865,6 +3304,43 @@ def test_create_partition_non_empty_request_with_auto_populated_field(): ) +def test_create_partition_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_partition in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_partition + ] = mock_rpc + request = {} + client.create_partition(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_partition_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2891,6 +3367,52 @@ async def test_create_partition_empty_call_async(): assert args[0] == metadata_.CreatePartitionRequest() +@pytest.mark.asyncio +async def test_create_partition_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetadataServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_partition + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_partition + ] = mock_object + + request = {} + await client.create_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.create_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_partition_async( transport: str = "grpc_asyncio", request_type=metadata_.CreatePartitionRequest @@ -3128,6 +3650,9 @@ def test_delete_partition_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_partition), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_partition() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3152,6 +3677,9 @@ def test_delete_partition_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_partition), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.delete_partition(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3161,6 +3689,43 @@ def test_delete_partition_non_empty_request_with_auto_populated_field(): ) +def test_delete_partition_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_partition in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_partition + ] = mock_rpc + request = {} + client.delete_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_partition_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3180,6 +3745,52 @@ async def test_delete_partition_empty_call_async(): assert args[0] == metadata_.DeletePartitionRequest() +@pytest.mark.asyncio +async def test_delete_partition_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetadataServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_partition + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_partition + ] = mock_object + + request = {} + await client.delete_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.delete_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_partition_async( transport: str = "grpc_asyncio", request_type=metadata_.DeletePartitionRequest @@ -3405,6 +4016,9 @@ def test_get_partition_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_partition), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.get_partition() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3428,6 +4042,9 @@ def test_get_partition_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_partition), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_partition(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3436,6 +4053,41 @@ def test_get_partition_non_empty_request_with_auto_populated_field(): ) +def test_get_partition_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_partition in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_partition] = mock_rpc + request = {} + client.get_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_partition_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3462,6 +4114,52 @@ async def test_get_partition_empty_call_async(): assert args[0] == metadata_.GetPartitionRequest() +@pytest.mark.asyncio +async def test_get_partition_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetadataServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_partition + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_partition + ] = mock_object + + request = {} + await client.get_partition(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_partition_async( transport: str = "grpc_asyncio", request_type=metadata_.GetPartitionRequest @@ -3692,6 +4390,9 @@ def test_list_partitions_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_partitions), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_partitions() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3717,6 +4418,9 @@ def test_list_partitions_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_partitions), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_partitions(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3727,6 +4431,41 @@ def test_list_partitions_non_empty_request_with_auto_populated_field(): ) +def test_list_partitions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_partitions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_partitions] = mock_rpc + request = {} + client.list_partitions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_partitions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_partitions_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3750,6 +4489,52 @@ async def test_list_partitions_empty_call_async(): assert args[0] == metadata_.ListPartitionsRequest() +@pytest.mark.asyncio +async def test_list_partitions_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetadataServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_partitions + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_partitions + ] = mock_object + + request = {} + await client.list_partitions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_partitions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_partitions_async( transport: str = "grpc_asyncio", request_type=metadata_.ListPartitionsRequest