diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/MANIFEST.in b/sdk/streamanalytics/azure-mgmt-streamanalytics/MANIFEST.in index a3cb07df8765..3a9b6517412b 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/MANIFEST.in +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/MANIFEST.in @@ -1,3 +1,4 @@ +include _meta.json recursive-include tests *.py *.yaml include *.md include azure/__init__.py diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/_meta.json b/sdk/streamanalytics/azure-mgmt-streamanalytics/_meta.json new file mode 100644 index 000000000000..4e5900f0322a --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/_meta.json @@ -0,0 +1,11 @@ +{ + "autorest": "3.4.2", + "use": [ + "@autorest/python@5.8.1", + "@autorest/modelerfour@4.19.2" + ], + "commit": "b82bcdac797c79b0a88b7dd6572109e1b0ac36b3", + "repository_url": "https://github.com/Azure/azure-rest-api-specs", + "autorest_command": "autorest specification/streamanalytics/resource-manager/readme.md --multiapi --python --python-mode=update --python-sdks-folder=/home/vsts/work/1/s/azure-sdk-for-python/sdk --track2 --use=@autorest/python@5.8.1 --use=@autorest/modelerfour@4.19.2 --version=3.4.2", + "readme": "specification/streamanalytics/resource-manager/readme.md" +} \ No newline at end of file diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_metadata.json b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_metadata.json index 5ff7c85f8f06..2d5d14a7ee49 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_metadata.json +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_metadata.json @@ -8,32 +8,35 @@ "base_url": "\u0027https://management.azure.com\u0027", "custom_base_url": null, "azure_arm": true, - "has_lro_operations": true + "has_lro_operations": true, + "client_side_validation": false, + "sync_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials\": [\"TokenCredential\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", \"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"msrest\": [\"Deserializer\", \"Serializer\"], \"azure.mgmt.core\": [\"ARMPipelineClient\"]}, \"local\": {\"._configuration\": [\"StreamAnalyticsManagementClientConfiguration\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}, \"azurecore\": {\"azure.core.pipeline.transport\": [\"HttpRequest\", \"HttpResponse\"]}}}", + "async_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials_async\": [\"AsyncTokenCredential\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", \"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"msrest\": [\"Deserializer\", \"Serializer\"], \"azure.mgmt.core\": [\"AsyncARMPipelineClient\"]}, \"local\": {\"._configuration\": [\"StreamAnalyticsManagementClientConfiguration\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}, \"azurecore\": {\"azure.core.pipeline.transport\": [\"AsyncHttpResponse\", \"HttpRequest\"]}}}" }, "global_parameters": { - "sync_method": { + "sync": { "credential": { - "method_signature": "credential, # type: \"TokenCredential\"", + "signature": "credential, # type: \"TokenCredential\"", "description": "Credential needed for the client to connect to Azure.", "docstring_type": "~azure.core.credentials.TokenCredential", "required": true }, "subscription_id": { - 
"method_signature": "subscription_id, # type: str", + "signature": "subscription_id, # type: str", "description": "The ID of the target subscription.", "docstring_type": "str", "required": true } }, - "async_method": { + "async": { "credential": { - "method_signature": "credential, # type: \"AsyncTokenCredential\"", + "signature": "credential: \"AsyncTokenCredential\",", "description": "Credential needed for the client to connect to Azure.", "docstring_type": "~azure.core.credentials_async.AsyncTokenCredential", "required": true }, "subscription_id": { - "method_signature": "subscription_id, # type: str", + "signature": "subscription_id: str,", "description": "The ID of the target subscription.", "docstring_type": "str", "required": true @@ -41,14 +44,58 @@ }, "constant": { }, - "call": "credential, subscription_id" + "call": "credential, subscription_id", + "service_client_specific": { + "sync": { + "api_version": { + "signature": "api_version=None, # type: Optional[str]", + "description": "API version to use if no profile is provided, or if missing in profile.", + "docstring_type": "str", + "required": false + }, + "base_url": { + "signature": "base_url=None, # type: Optional[str]", + "description": "Service URL", + "docstring_type": "str", + "required": false + }, + "profile": { + "signature": "profile=KnownProfiles.default, # type: KnownProfiles", + "description": "A profile definition, from KnownProfiles to dict.", + "docstring_type": "azure.profiles.KnownProfiles", + "required": false + } + }, + "async": { + "api_version": { + "signature": "api_version: Optional[str] = None,", + "description": "API version to use if no profile is provided, or if missing in profile.", + "docstring_type": "str", + "required": false + }, + "base_url": { + "signature": "base_url: Optional[str] = None,", + "description": "Service URL", + "docstring_type": "str", + "required": false + }, + "profile": { + "signature": "profile: KnownProfiles = KnownProfiles.default,", + "description": "A profile definition, from KnownProfiles to dict.", + "docstring_type": "azure.profiles.KnownProfiles", + "required": false + } + } + } }, "config": { "credential": true, "credential_scopes": ["https://management.azure.com/.default"], "credential_default_policy_type": "BearerTokenCredentialPolicy", "credential_default_policy_type_has_async_version": true, - "credential_key_header_name": null + "credential_key_header_name": null, + "sync_imports": "{\"regular\": {\"azurecore\": {\"azure.core.configuration\": [\"Configuration\"], \"azure.core.pipeline\": [\"policies\"], \"azure.mgmt.core.policies\": [\"ARMHttpLoggingPolicy\"]}, \"local\": {\"._version\": [\"VERSION\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\"]}}, \"typing\": {\"azurecore\": {\"azure.core.credentials\": [\"TokenCredential\"]}}}", + "async_imports": "{\"regular\": {\"azurecore\": {\"azure.core.configuration\": [\"Configuration\"], \"azure.core.pipeline\": [\"policies\"], \"azure.mgmt.core.policies\": [\"ARMHttpLoggingPolicy\"]}, \"local\": {\".._version\": [\"VERSION\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\"]}}, \"typing\": {\"azurecore\": {\"azure.core.credentials_async\": [\"AsyncTokenCredential\"]}}}" }, "operation_groups": { "functions": "FunctionsOperations", @@ -60,9 +107,5 @@ "operations": "Operations", "clusters": "ClustersOperations", "private_endpoints": "PrivateEndpointsOperations" - }, - "operation_mixins": { - }, - "sync_imports": "None", - "async_imports": "None" + } } \ No newline at end of file diff --git 
a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_stream_analytics_management_client.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_stream_analytics_management_client.py index 7322c3fb1613..171587be0f6d 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_stream_analytics_management_client.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_stream_analytics_management_client.py @@ -16,6 +16,7 @@ from typing import Any, Optional from azure.core.credentials import TokenCredential + from azure.core.pipeline.transport import HttpRequest, HttpResponse from ._configuration import StreamAnalyticsManagementClientConfiguration from .operations import FunctionsOperations @@ -96,6 +97,24 @@ def __init__( self.private_endpoints = PrivateEndpointsOperations( self._client, self._config, self._serialize, self._deserialize) + def _send_request(self, http_request, **kwargs): + # type: (HttpRequest, Any) -> HttpResponse + """Runs the network request through the client's chained policies. + + :param http_request: The network request you want to make. Required. + :type http_request: ~azure.core.pipeline.transport.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to True. + :return: The response of your network call. Does not do error handling on your response. + :rtype: ~azure.core.pipeline.transport.HttpResponse + """ + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + http_request.url = self._client.format_url(http_request.url, **path_format_arguments) + stream = kwargs.pop("stream", True) + pipeline_response = self._client._pipeline.run(http_request, stream=stream, **kwargs) + return pipeline_response.http_response + def close(self): # type: () -> None self._client.close() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_version.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_version.py index 3b909b5c8886..e5754a47ce68 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_version.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -VERSION = "1.0.0rc1" +VERSION = "1.0.0b1" diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_stream_analytics_management_client.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_stream_analytics_management_client.py index 99472b9429a0..bf3003dc8c32 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_stream_analytics_management_client.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_stream_analytics_management_client.py @@ -8,6 +8,7 @@ from typing import Any, Optional, TYPE_CHECKING +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core import AsyncARMPipelineClient from msrest import Deserializer, Serializer @@ -93,6 +94,23 @@ def __init__( self.private_endpoints = PrivateEndpointsOperations( self._client, self._config, self._serialize, self._deserialize) + async def _send_request(self, http_request: HttpRequest, **kwargs: Any) -> AsyncHttpResponse: + """Runs the network request through the client's chained policies. + + :param http_request: The network request you want to make. Required. + :type http_request: ~azure.core.pipeline.transport.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to True. + :return: The response of your network call. Does not do error handling on your response. + :rtype: ~azure.core.pipeline.transport.AsyncHttpResponse + """ + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + http_request.url = self._client.format_url(http_request.url, **path_format_arguments) + stream = kwargs.pop("stream", True) + pipeline_response = await self._client._pipeline.run(http_request, stream=stream, **kwargs) + return pipeline_response.http_response + async def close(self) -> None: await self._client.close() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_clusters_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_clusters_operations.py index 29088ce6710f..154d4012c05a 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_clusters_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_clusters_operations.py @@ -16,7 +16,7 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling -from ... import models +from ... import models as _models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -35,7 +35,7 @@ class ClustersOperations: :param deserializer: An object model deserializer. 
""" - models = models + models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client @@ -47,12 +47,12 @@ async def _create_or_update_initial( self, resource_group_name: str, cluster_name: str, - cluster: "models.Cluster", + cluster: "_models.Cluster", if_match: Optional[str] = None, if_none_match: Optional[str] = None, - **kwargs - ) -> "models.Cluster": - cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + **kwargs: Any + ) -> "_models.Cluster": + cls = kwargs.pop('cls', None) # type: ClsType["_models.Cluster"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -92,7 +92,7 @@ async def _create_or_update_initial( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -111,11 +111,11 @@ async def begin_create_or_update( self, resource_group_name: str, cluster_name: str, - cluster: "models.Cluster", + cluster: "_models.Cluster", if_match: Optional[str] = None, if_none_match: Optional[str] = None, - **kwargs - ) -> AsyncLROPoller["models.Cluster"]: + **kwargs: Any + ) -> AsyncLROPoller["_models.Cluster"]: """Creates a Stream Analytics Cluster or replaces an already existing cluster. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -134,8 +134,8 @@ async def begin_create_or_update( :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either Cluster or the result of cls(response) @@ -143,7 +143,7 @@ async def begin_create_or_update( :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Cluster"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -170,7 +170,13 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -188,11 +194,11 @@ async def _update_initial( self, resource_group_name: str, cluster_name: str, - cluster: "models.Cluster", + cluster: "_models.Cluster", if_match: Optional[str] = None, - **kwargs - ) -> Optional["models.Cluster"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Cluster"]] + **kwargs: Any + ) -> Optional["_models.Cluster"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.Cluster"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -230,7 +236,7 @@ async def _update_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None @@ -247,10 +253,10 @@ async def begin_update( self, resource_group_name: str, cluster_name: str, - cluster: "models.Cluster", + cluster: "_models.Cluster", if_match: Optional[str] = None, - **kwargs - ) -> AsyncLROPoller["models.Cluster"]: + **kwargs: Any + ) -> AsyncLROPoller["_models.Cluster"]: """Updates an existing cluster. This can be used to partially update (ie. update one or two properties) a cluster without affecting the rest of the cluster definition. @@ -267,8 +273,8 @@ async def begin_update( :type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either Cluster or the result of cls(response) @@ -276,7 +282,7 @@ async def begin_update( :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Cluster"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -302,7 +308,13 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -320,8 +332,8 @@ async def get( self, resource_group_name: str, cluster_name: str, - **kwargs - ) -> "models.Cluster": + **kwargs: Any + ) -> "_models.Cluster": """Gets information about the specified cluster. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -333,7 +345,7 @@ async def get( :rtype: ~stream_analytics_management_client.models.Cluster :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Cluster"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -364,7 +376,7 @@ async def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('Cluster', pipeline_response) @@ -379,7 +391,7 @@ async def _delete_initial( self, resource_group_name: str, cluster_name: str, - **kwargs + **kwargs: Any ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { @@ -412,7 +424,7 @@ async def _delete_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: @@ -424,7 +436,7 @@ async def begin_delete( self, resource_group_name: str, cluster_name: str, - **kwargs + **kwargs: Any ) -> AsyncLROPoller[None]: """Deletes the specified cluster. @@ -434,8 +446,8 @@ async def begin_delete( :type cluster_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) @@ -464,7 +476,13 @@ def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -480,8 +498,8 @@ def get_long_running_output(pipeline_response): def list_by_subscription( self, - **kwargs - ) -> AsyncIterable["models.ClusterListResult"]: + **kwargs: Any + ) -> AsyncIterable["_models.ClusterListResult"]: """Lists all of the clusters in the given subscription. :keyword callable cls: A custom type or function that will be passed the direct response @@ -489,7 +507,7 @@ def list_by_subscription( :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.ClusterListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ClusterListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ClusterListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -534,7 +552,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -548,8 +566,8 @@ async def get_next(next_link=None): def list_by_resource_group( self, resource_group_name: str, - **kwargs - ) -> AsyncIterable["models.ClusterListResult"]: + **kwargs: Any + ) -> AsyncIterable["_models.ClusterListResult"]: """Lists all of the clusters in the given resource group. :param resource_group_name: The name of the resource group. The name is case insensitive. 
@@ -559,7 +577,7 @@ def list_by_resource_group( :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.ClusterListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ClusterListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ClusterListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -605,7 +623,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -620,8 +638,8 @@ def list_streaming_jobs( self, resource_group_name: str, cluster_name: str, - **kwargs - ) -> AsyncIterable["models.ClusterJobListResult"]: + **kwargs: Any + ) -> AsyncIterable["_models.ClusterJobListResult"]: """Lists all of the streaming jobs in the given cluster. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -633,7 +651,7 @@ def list_streaming_jobs( :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.ClusterJobListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ClusterJobListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ClusterJobListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -680,7 +698,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_functions_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_functions_operations.py index 1d04fcab6693..58010e7379da 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_functions_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_functions_operations.py @@ -16,7 +16,7 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling -from ... import models +from ... import models as _models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -35,7 +35,7 @@ class FunctionsOperations: :param deserializer: An object model deserializer. 
""" - models = models + models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client @@ -48,11 +48,11 @@ async def create_or_replace( resource_group_name: str, job_name: str, function_name: str, - function: "models.Function", + function: "_models.Function", if_match: Optional[str] = None, if_none_match: Optional[str] = None, - **kwargs - ) -> "models.Function": + **kwargs: Any + ) -> "_models.Function": """Creates a function or replaces an already existing function under an existing streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -76,7 +76,7 @@ async def create_or_replace( :rtype: ~stream_analytics_management_client.models.Function :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Function"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Function"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -139,10 +139,10 @@ async def update( resource_group_name: str, job_name: str, function_name: str, - function: "models.Function", + function: "_models.Function", if_match: Optional[str] = None, - **kwargs - ) -> "models.Function": + **kwargs: Any + ) -> "_models.Function": """Updates an existing function under an existing streaming job. This can be used to partially update (ie. update one or two properties) a function without affecting the rest the job or function definition. @@ -167,7 +167,7 @@ async def update( :rtype: ~stream_analytics_management_client.models.Function :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Function"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Function"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -223,7 +223,7 @@ async def delete( resource_group_name: str, job_name: str, function_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes a function from the streaming job. @@ -280,8 +280,8 @@ async def get( resource_group_name: str, job_name: str, function_name: str, - **kwargs - ) -> "models.Function": + **kwargs: Any + ) -> "_models.Function": """Gets details about the specified function. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -295,7 +295,7 @@ async def get( :rtype: ~stream_analytics_management_client.models.Function :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Function"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Function"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -344,8 +344,8 @@ def list_by_streaming_job( resource_group_name: str, job_name: str, select: Optional[str] = None, - **kwargs - ) -> AsyncIterable["models.FunctionListResult"]: + **kwargs: Any + ) -> AsyncIterable["_models.FunctionListResult"]: """Lists all of the functions under the specified streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. 
@@ -361,7 +361,7 @@ def list_by_streaming_job( :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.FunctionListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.FunctionListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.FunctionListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -425,10 +425,10 @@ async def _test_initial( resource_group_name: str, job_name: str, function_name: str, - function: Optional["models.Function"] = None, - **kwargs - ) -> Optional["models.ResourceTestStatus"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ResourceTestStatus"]] + function: Optional["_models.Function"] = None, + **kwargs: Any + ) -> Optional["_models.ResourceTestStatus"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ResourceTestStatus"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -485,9 +485,9 @@ async def begin_test( resource_group_name: str, job_name: str, function_name: str, - function: Optional["models.Function"] = None, - **kwargs - ) -> AsyncLROPoller["models.ResourceTestStatus"]: + function: Optional["_models.Function"] = None, + **kwargs: Any + ) -> AsyncLROPoller["_models.ResourceTestStatus"]: """Tests if the information provided for a function is valid. This can range from testing the connection to the underlying web service behind the function or making sure the function code provided is syntactically correct. @@ -506,8 +506,8 @@ async def begin_test( :type function: ~stream_analytics_management_client.models.Function :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either ResourceTestStatus or the result of cls(response) @@ -515,7 +515,7 @@ async def begin_test( :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.ResourceTestStatus"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ResourceTestStatus"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -541,7 +541,14 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'functionName': self._serialize.url("function_name", function_name, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -560,9 +567,9 @@ async def retrieve_default_definition( resource_group_name: str, job_name: str, function_name: str, - function_retrieve_default_definition_parameters: Optional["models.FunctionRetrieveDefaultDefinitionParameters"] = None, - **kwargs - ) -> "models.Function": + function_retrieve_default_definition_parameters: Optional["_models.FunctionRetrieveDefaultDefinitionParameters"] = None, + **kwargs: Any + ) -> "_models.Function": """Retrieves the default definition of a function based on the parameters specified. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -579,7 +586,7 @@ async def retrieve_default_definition( :rtype: ~stream_analytics_management_client.models.Function :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Function"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Function"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_inputs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_inputs_operations.py index f7451e9e8aab..61cd58ceeade 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_inputs_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_inputs_operations.py @@ -16,7 +16,7 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling -from ... import models +from ... import models as _models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -35,7 +35,7 @@ class InputsOperations: :param deserializer: An object model deserializer. 
""" - models = models + models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client @@ -48,11 +48,11 @@ async def create_or_replace( resource_group_name: str, job_name: str, input_name: str, - input: "models.Input", + input: "_models.Input", if_match: Optional[str] = None, if_none_match: Optional[str] = None, - **kwargs - ) -> "models.Input": + **kwargs: Any + ) -> "_models.Input": """Creates an input or replaces an already existing input under an existing streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -75,7 +75,7 @@ async def create_or_replace( :rtype: ~stream_analytics_management_client.models.Input :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Input"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Input"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -138,10 +138,10 @@ async def update( resource_group_name: str, job_name: str, input_name: str, - input: "models.Input", + input: "_models.Input", if_match: Optional[str] = None, - **kwargs - ) -> "models.Input": + **kwargs: Any + ) -> "_models.Input": """Updates an existing input under an existing streaming job. This can be used to partially update (ie. update one or two properties) an input without affecting the rest the job or input definition. @@ -165,7 +165,7 @@ async def update( :rtype: ~stream_analytics_management_client.models.Input :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Input"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Input"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -221,7 +221,7 @@ async def delete( resource_group_name: str, job_name: str, input_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes an input from the streaming job. @@ -278,8 +278,8 @@ async def get( resource_group_name: str, job_name: str, input_name: str, - **kwargs - ) -> "models.Input": + **kwargs: Any + ) -> "_models.Input": """Gets details about the specified input. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -293,7 +293,7 @@ async def get( :rtype: ~stream_analytics_management_client.models.Input :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Input"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Input"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -342,8 +342,8 @@ def list_by_streaming_job( resource_group_name: str, job_name: str, select: Optional[str] = None, - **kwargs - ) -> AsyncIterable["models.InputListResult"]: + **kwargs: Any + ) -> AsyncIterable["_models.InputListResult"]: """Lists all of the inputs under the specified streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. 
@@ -359,7 +359,7 @@ def list_by_streaming_job( :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.InputListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.InputListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.InputListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -423,10 +423,10 @@ async def _test_initial( resource_group_name: str, job_name: str, input_name: str, - input: Optional["models.Input"] = None, - **kwargs - ) -> Optional["models.ResourceTestStatus"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ResourceTestStatus"]] + input: Optional["_models.Input"] = None, + **kwargs: Any + ) -> Optional["_models.ResourceTestStatus"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ResourceTestStatus"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -483,9 +483,9 @@ async def begin_test( resource_group_name: str, job_name: str, input_name: str, - input: Optional["models.Input"] = None, - **kwargs - ) -> AsyncLROPoller["models.ResourceTestStatus"]: + input: Optional["_models.Input"] = None, + **kwargs: Any + ) -> AsyncLROPoller["_models.ResourceTestStatus"]: """Tests whether an input’s datasource is reachable and usable by the Azure Stream Analytics service. @@ -503,8 +503,8 @@ async def begin_test( :type input: ~stream_analytics_management_client.models.Input :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either ResourceTestStatus or the result of cls(response) @@ -512,7 +512,7 @@ async def begin_test( :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.ResourceTestStatus"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ResourceTestStatus"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -538,7 +538,14 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'inputName': self._serialize.url("input_name", input_name, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_operations.py index 6e073694446f..024b371bc629 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_operations.py @@ -14,7 +14,7 @@ from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat -from ... import models +from ... import models as _models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -33,7 +33,7 @@ class Operations: :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client @@ -43,8 +43,8 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs - ) -> AsyncIterable["models.OperationListResult"]: + **kwargs: Any + ) -> AsyncIterable["_models.OperationListResult"]: """Lists all of the available Stream Analytics related operations. 
:keyword callable cls: A custom type or function that will be passed the direct response @@ -52,7 +52,7 @@ def list( :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.OperationListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_outputs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_outputs_operations.py index a9d08028e8aa..191e25b95d56 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_outputs_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_outputs_operations.py @@ -16,7 +16,7 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling -from ... import models +from ... import models as _models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -35,7 +35,7 @@ class OutputsOperations: :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client @@ -48,11 +48,11 @@ async def create_or_replace( resource_group_name: str, job_name: str, output_name: str, - output: "models.Output", + output: "_models.Output", if_match: Optional[str] = None, if_none_match: Optional[str] = None, - **kwargs - ) -> "models.Output": + **kwargs: Any + ) -> "_models.Output": """Creates an output or replaces an already existing output under an existing streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -76,7 +76,7 @@ async def create_or_replace( :rtype: ~stream_analytics_management_client.models.Output :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Output"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Output"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -139,10 +139,10 @@ async def update( resource_group_name: str, job_name: str, output_name: str, - output: "models.Output", + output: "_models.Output", if_match: Optional[str] = None, - **kwargs - ) -> "models.Output": + **kwargs: Any + ) -> "_models.Output": """Updates an existing output under an existing streaming job. This can be used to partially update (ie. update one or two properties) an output without affecting the rest the job or output definition. @@ -167,7 +167,7 @@ async def update( :rtype: ~stream_analytics_management_client.models.Output :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Output"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Output"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -223,7 +223,7 @@ async def delete( resource_group_name: str, job_name: str, output_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes an output from the streaming job. 
@@ -280,8 +280,8 @@ async def get( resource_group_name: str, job_name: str, output_name: str, - **kwargs - ) -> "models.Output": + **kwargs: Any + ) -> "_models.Output": """Gets details about the specified output. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -295,7 +295,7 @@ async def get( :rtype: ~stream_analytics_management_client.models.Output :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Output"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Output"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -344,8 +344,8 @@ def list_by_streaming_job( resource_group_name: str, job_name: str, select: Optional[str] = None, - **kwargs - ) -> AsyncIterable["models.OutputListResult"]: + **kwargs: Any + ) -> AsyncIterable["_models.OutputListResult"]: """Lists all of the outputs under the specified streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -361,7 +361,7 @@ def list_by_streaming_job( :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.OutputListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.OutputListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.OutputListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -425,10 +425,10 @@ async def _test_initial( resource_group_name: str, job_name: str, output_name: str, - output: Optional["models.Output"] = None, - **kwargs - ) -> Optional["models.ResourceTestStatus"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ResourceTestStatus"]] + output: Optional["_models.Output"] = None, + **kwargs: Any + ) -> Optional["_models.ResourceTestStatus"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ResourceTestStatus"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -485,9 +485,9 @@ async def begin_test( resource_group_name: str, job_name: str, output_name: str, - output: Optional["models.Output"] = None, - **kwargs - ) -> AsyncLROPoller["models.ResourceTestStatus"]: + output: Optional["_models.Output"] = None, + **kwargs: Any + ) -> AsyncLROPoller["_models.ResourceTestStatus"]: """Tests whether an output’s datasource is reachable and usable by the Azure Stream Analytics service. @@ -505,8 +505,8 @@ async def begin_test( :type output: ~stream_analytics_management_client.models.Output :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either ResourceTestStatus or the result of cls(response) @@ -514,7 +514,7 @@ async def begin_test( :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.ResourceTestStatus"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ResourceTestStatus"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -540,7 +540,14 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'outputName': self._serialize.url("output_name", output_name, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_private_endpoints_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_private_endpoints_operations.py index ba2975090858..a93cfac150b6 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_private_endpoints_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_private_endpoints_operations.py @@ -16,7 +16,7 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling -from ... import models +from ... import models as _models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -35,7 +35,7 @@ class PrivateEndpointsOperations: :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client @@ -48,11 +48,11 @@ async def create_or_update( resource_group_name: str, cluster_name: str, private_endpoint_name: str, - private_endpoint: "models.PrivateEndpoint", + private_endpoint: "_models.PrivateEndpoint", if_match: Optional[str] = None, if_none_match: Optional[str] = None, - **kwargs - ) -> "models.PrivateEndpoint": + **kwargs: Any + ) -> "_models.PrivateEndpoint": """Creates a Stream Analytics Private Endpoint or replaces an already existing Private Endpoint. :param resource_group_name: The name of the resource group. The name is case insensitive. 
@@ -76,7 +76,7 @@ async def create_or_update( :rtype: ~stream_analytics_management_client.models.PrivateEndpoint :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpoint"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpoint"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -117,7 +117,7 @@ async def create_or_update( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -137,8 +137,8 @@ async def get( resource_group_name: str, cluster_name: str, private_endpoint_name: str, - **kwargs - ) -> "models.PrivateEndpoint": + **kwargs: Any + ) -> "_models.PrivateEndpoint": """Gets information about the specified Private Endpoint. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -152,7 +152,7 @@ async def get( :rtype: ~stream_analytics_management_client.models.PrivateEndpoint :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpoint"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpoint"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -184,7 +184,7 @@ async def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('PrivateEndpoint', pipeline_response) @@ -200,7 +200,7 @@ async def _delete_initial( resource_group_name: str, cluster_name: str, private_endpoint_name: str, - **kwargs + **kwargs: Any ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { @@ -234,7 +234,7 @@ async def _delete_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: @@ -247,7 +247,7 @@ async def begin_delete( resource_group_name: str, cluster_name: str, private_endpoint_name: str, - **kwargs + **kwargs: Any ) -> AsyncLROPoller[None]: """Delete the specified private endpoint. @@ -259,8 +259,8 @@ async def begin_delete( :type private_endpoint_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. 
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) @@ -290,7 +290,14 @@ def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + 'privateEndpointName': self._serialize.url("private_endpoint_name", private_endpoint_name, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -308,8 +315,8 @@ def list_by_cluster( self, resource_group_name: str, cluster_name: str, - **kwargs - ) -> AsyncIterable["models.PrivateEndpointListResult"]: + **kwargs: Any + ) -> AsyncIterable["_models.PrivateEndpointListResult"]: """Lists the private endpoints in the cluster. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -321,7 +328,7 @@ def list_by_cluster( :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.PrivateEndpointListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -368,7 +375,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_streaming_jobs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_streaming_jobs_operations.py index d6cdd13560d3..5da2be40099c 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_streaming_jobs_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_streaming_jobs_operations.py @@ -16,7 +16,7 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling -from ... import models +from ... import models as _models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -35,7 +35,7 @@ class StreamingJobsOperations: :param deserializer: An object model deserializer. 
""" - models = models + models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client @@ -47,12 +47,12 @@ async def _create_or_replace_initial( self, resource_group_name: str, job_name: str, - streaming_job: "models.StreamingJob", + streaming_job: "_models.StreamingJob", if_match: Optional[str] = None, if_none_match: Optional[str] = None, - **kwargs - ) -> "models.StreamingJob": - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJob"] + **kwargs: Any + ) -> "_models.StreamingJob": + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJob"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -113,11 +113,11 @@ async def begin_create_or_replace( self, resource_group_name: str, job_name: str, - streaming_job: "models.StreamingJob", + streaming_job: "_models.StreamingJob", if_match: Optional[str] = None, if_none_match: Optional[str] = None, - **kwargs - ) -> AsyncLROPoller["models.StreamingJob"]: + **kwargs: Any + ) -> AsyncLROPoller["_models.StreamingJob"]: """Creates a streaming job or replaces an already existing streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -137,8 +137,8 @@ async def begin_create_or_replace( :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either StreamingJob or the result of cls(response) @@ -146,7 +146,7 @@ async def begin_create_or_replace( :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJob"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJob"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -176,7 +176,13 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, response_headers) return deserialized - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -194,10 +200,10 @@ async def update( self, resource_group_name: str, job_name: str, - streaming_job: "models.StreamingJob", + streaming_job: "_models.StreamingJob", if_match: Optional[str] = None, - **kwargs - ) -> "models.StreamingJob": + **kwargs: Any + ) -> "_models.StreamingJob": """Updates an existing streaming job. This can be used to partially update (ie. update one or two properties) a streaming job without affecting the rest the job definition. @@ -219,7 +225,7 @@ async def update( :rtype: ~stream_analytics_management_client.models.StreamingJob :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJob"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJob"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -273,7 +279,7 @@ async def _delete_initial( self, resource_group_name: str, job_name: str, - **kwargs + **kwargs: Any ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { @@ -315,7 +321,7 @@ async def begin_delete( self, resource_group_name: str, job_name: str, - **kwargs + **kwargs: Any ) -> AsyncLROPoller[None]: """Deletes a streaming job. @@ -325,8 +331,8 @@ async def begin_delete( :type job_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) @@ -355,7 +361,13 @@ def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -374,8 +386,8 @@ async def get( resource_group_name: str, job_name: str, expand: Optional[str] = None, - **kwargs - ) -> "models.StreamingJob": + **kwargs: Any + ) -> "_models.StreamingJob": """Gets details about the specified streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -392,7 +404,7 @@ async def get( :rtype: ~stream_analytics_management_client.models.StreamingJob :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJob"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJob"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -441,8 +453,8 @@ def list_by_resource_group( self, resource_group_name: str, expand: Optional[str] = None, - **kwargs - ) -> AsyncIterable["models.StreamingJobListResult"]: + **kwargs: Any + ) -> AsyncIterable["_models.StreamingJobListResult"]: """Lists all of the streaming jobs in the specified resource group. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -457,7 +469,7 @@ def list_by_resource_group( :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.StreamingJobListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJobListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJobListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -518,8 +530,8 @@ async def get_next(next_link=None): def list( self, expand: Optional[str] = None, - **kwargs - ) -> AsyncIterable["models.StreamingJobListResult"]: + **kwargs: Any + ) -> AsyncIterable["_models.StreamingJobListResult"]: """Lists all of the streaming jobs in the given subscription. :param expand: The $expand OData query parameter. 
This is a comma-separated list of additional @@ -532,7 +544,7 @@ def list( :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.StreamingJobListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJobListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJobListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -593,8 +605,8 @@ async def _start_initial( self, resource_group_name: str, job_name: str, - start_job_parameters: Optional["models.StartStreamingJobParameters"] = None, - **kwargs + start_job_parameters: Optional["_models.StartStreamingJobParameters"] = None, + **kwargs: Any ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { @@ -644,8 +656,8 @@ async def begin_start( self, resource_group_name: str, job_name: str, - start_job_parameters: Optional["models.StartStreamingJobParameters"] = None, - **kwargs + start_job_parameters: Optional["_models.StartStreamingJobParameters"] = None, + **kwargs: Any ) -> AsyncLROPoller[None]: """Starts a streaming job. Once a job is started it will start processing input events and produce output. @@ -658,8 +670,8 @@ async def begin_start( :type start_job_parameters: ~stream_analytics_management_client.models.StartStreamingJobParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) @@ -689,7 +701,13 @@ def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -707,7 +725,7 @@ async def _stop_initial( self, resource_group_name: str, job_name: str, - **kwargs + **kwargs: Any ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { @@ -749,7 +767,7 @@ async def begin_stop( self, resource_group_name: str, job_name: str, - **kwargs + **kwargs: Any ) -> AsyncLROPoller[None]: """Stops a running streaming job. This will cause a running streaming job to stop processing input events and producing output. 
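Each begin_* hunk in this file now builds path_format_arguments and passes it to AsyncARMPolling, so the poller can re-expand parameterized polling URLs. A hedged sketch of how the polling keyword documented in these hunks behaves from the caller's side; the client object and names are illustrative assumptions:

from azure.core.polling import AsyncNoPolling

async def restart_job(client, resource_group: str, job_name: str) -> None:
    # polling=True (the default) selects AsyncARMPolling, now constructed
    # with the path_format_arguments introduced by this change.
    poller = await client.streaming_jobs.begin_stop(resource_group, job_name)
    await poller.result()  # block until the stop LRO completes

    # polling=False (or an initialized polling object) opts out of the default:
    await client.streaming_jobs.begin_start(
        resource_group, job_name, polling=AsyncNoPolling(),
    )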
@@ -760,8 +778,8 @@ async def begin_stop( :type job_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) @@ -790,7 +808,13 @@ def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_subscriptions_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_subscriptions_operations.py index 7a94e1139245..d0d48f8ef6d1 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_subscriptions_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_subscriptions_operations.py @@ -5,15 +5,17 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Generic, Optional, TypeVar +from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling -from ... import models +from ... import models as _models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -32,7 +34,7 @@ class SubscriptionsOperations: :param deserializer: An object model deserializer. 
""" - models = models + models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client @@ -43,20 +45,19 @@ def __init__(self, client, config, serializer, deserializer) -> None: async def list_quotas( self, location: str, - **kwargs - ) -> "models.SubscriptionQuotasListResult": + **kwargs: Any + ) -> "_models.SubscriptionQuotasListResult": """Retrieves the subscription's current quota information in a particular region. - :param location: The region in which to retrieve the subscription's quota information. You can - find out which regions Azure Stream Analytics is supported in here: - https://azure.microsoft.com/en-us/regions/. + :param location: The region to which the request is sent. You can find out which regions Azure + Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. :type location: str :keyword callable cls: A custom type or function that will be passed the direct response :return: SubscriptionQuotasListResult, or the result of cls(response) :rtype: ~stream_analytics_management_client.models.SubscriptionQuotasListResult :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.SubscriptionQuotasListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.SubscriptionQuotasListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -95,3 +96,603 @@ async def list_quotas( return deserialized list_quotas.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/quotas'} # type: ignore + + async def _test_query_initial( + self, + location: str, + streaming_job: "_models.StreamingJob", + **kwargs: Any + ) -> Optional["_models.QueryTestingResult"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.QueryTestingResult"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._test_query_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'location': self._serialize.url("location", location, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(streaming_job, 'StreamingJob') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = 
self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('QueryTestingResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _test_query_initial.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testQuery'} # type: ignore + + async def begin_test_query( + self, + location: str, + streaming_job: "_models.StreamingJob", + **kwargs: Any + ) -> AsyncLROPoller["_models.QueryTestingResult"]: + """Test the Stream Analytics query on a sample input. + + :param location: The region to which the request is sent. You can find out which regions Azure + Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. + :type location: str + :param streaming_job: A streaming job object. This object defines the input, output, and + transformation for the query testing. + :type streaming_job: ~stream_analytics_management_client.models.StreamingJob + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either QueryTestingResult or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~stream_analytics_management_client.models.QueryTestingResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.QueryTestingResult"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._test_query_initial( + location=location, + streaming_job=streaming_job, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('QueryTestingResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'location': self._serialize.url("location", location, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, 
get_long_running_output, polling_method) + begin_test_query.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testQuery'} # type: ignore + + async def _compile_query_initial( + self, + location: str, + streaming_job: "_models.StreamingJob", + **kwargs: Any + ) -> "_models.QueryCompilationResult": + cls = kwargs.pop('cls', None) # type: ClsType["_models.QueryCompilationResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._compile_query_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'location': self._serialize.url("location", location, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(streaming_job, 'StreamingJob') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('QueryCompilationResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _compile_query_initial.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/compileQuery'} # type: ignore + + async def begin_compile_query( + self, + location: str, + streaming_job: "_models.StreamingJob", + **kwargs: Any + ) -> AsyncLROPoller["_models.QueryCompilationResult"]: + """Compile the Stream Analytics query. + + :param location: The region to which the request is sent. You can find out which regions Azure + Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. + :type location: str + :param streaming_job: A streaming job object. This object defines the input, output, and + transformation for the query compilation. + :type streaming_job: ~stream_analytics_management_client.models.StreamingJob + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. 
+ Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either QueryCompilationResult or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~stream_analytics_management_client.models.QueryCompilationResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.QueryCompilationResult"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._compile_query_initial( + location=location, + streaming_job=streaming_job, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('QueryCompilationResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'location': self._serialize.url("location", location, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_compile_query.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/compileQuery'} # type: ignore + + async def _sample_input_initial( + self, + location: str, + sample_input: "_models.SampleInput", + **kwargs: Any + ) -> "_models.SampleInputResult": + cls = kwargs.pop('cls', None) # type: ClsType["_models.SampleInputResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._sample_input_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'location': self._serialize.url("location", location, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') 
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(sample_input, 'SampleInput') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('SampleInputResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _sample_input_initial.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/sampleInput'} # type: ignore + + async def begin_sample_input( + self, + location: str, + sample_input: "_models.SampleInput", + **kwargs: Any + ) -> AsyncLROPoller["_models.SampleInputResult"]: + """Sample the Stream Analytics input data. + + :param location: The region to which the request is sent. You can find out which regions Azure + Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. + :type location: str + :param sample_input: Defines the necessary parameters for sampling the Stream Analytics input + data. + :type sample_input: ~stream_analytics_management_client.models.SampleInput + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either SampleInputResult or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~stream_analytics_management_client.models.SampleInputResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.SampleInputResult"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._sample_input_initial( + location=location, + sample_input=sample_input, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('SampleInputResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'location': self._serialize.url("location", location, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_sample_input.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/sampleInput'} # type: ignore + + async def _test_input_initial( + self, + location: str, + test_input: "_models.TestInput", + **kwargs: Any + ) -> "_models.TestDatasourceResult": + cls = kwargs.pop('cls', None) # type: ClsType["_models.TestDatasourceResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._test_input_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'location': self._serialize.url("location", location, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(test_input, 'TestInput') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + 
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('TestDatasourceResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _test_input_initial.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testInput'} # type: ignore + + async def begin_test_input( + self, + location: str, + test_input: "_models.TestInput", + **kwargs: Any + ) -> AsyncLROPoller["_models.TestDatasourceResult"]: + """Test the Stream Analytics input. + + :param location: The region to which the request is sent. You can find out which regions Azure + Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. + :type location: str + :param test_input: Defines the necessary parameters for testing the Stream Analytics input. + :type test_input: ~stream_analytics_management_client.models.TestInput + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either TestDatasourceResult or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~stream_analytics_management_client.models.TestDatasourceResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.TestDatasourceResult"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._test_input_initial( + location=location, + test_input=test_input, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('TestDatasourceResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'location': self._serialize.url("location", location, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_test_input.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testInput'} # type: ignore + + async def _test_output_initial( + self, + location: str, + test_output: "_models.TestOutput", + **kwargs: Any + ) -> "_models.TestDatasourceResult": + cls = kwargs.pop('cls', None) # type: ClsType["_models.TestDatasourceResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._test_output_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'location': self._serialize.url("location", location, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(test_output, 'TestOutput') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + 
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('TestDatasourceResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _test_output_initial.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testOutput'} # type: ignore + + async def begin_test_output( + self, + location: str, + test_output: "_models.TestOutput", + **kwargs: Any + ) -> AsyncLROPoller["_models.TestDatasourceResult"]: + """Test the Stream Analytics output. + + :param location: The region to which the request is sent. You can find out which regions Azure + Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. + :type location: str + :param test_output: Defines the necessary parameters for testing the Stream Analytics output. + :type test_output: ~stream_analytics_management_client.models.TestOutput + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either TestDatasourceResult or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~stream_analytics_management_client.models.TestDatasourceResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.TestDatasourceResult"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._test_output_initial( + location=location, + test_output=test_output, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('TestDatasourceResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'location': self._serialize.url("location", location, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_test_output.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testOutput'} # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_transformations_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_transformations_operations.py index 066f372f8baa..a390c859eea4 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_transformations_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_transformations_operations.py @@ -13,7 +13,7 @@ from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat -from ... import models +from ... import models as _models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -32,7 +32,7 @@ class TransformationsOperations: :param deserializer: An object model deserializer. 
""" - models = models + models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client @@ -45,11 +45,11 @@ async def create_or_replace( resource_group_name: str, job_name: str, transformation_name: str, - transformation: "models.Transformation", + transformation: "_models.Transformation", if_match: Optional[str] = None, if_none_match: Optional[str] = None, - **kwargs - ) -> "models.Transformation": + **kwargs: Any + ) -> "_models.Transformation": """Creates a transformation or replaces an already existing transformation under an existing streaming job. @@ -75,7 +75,7 @@ async def create_or_replace( :rtype: ~stream_analytics_management_client.models.Transformation :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Transformation"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Transformation"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -138,10 +138,10 @@ async def update( resource_group_name: str, job_name: str, transformation_name: str, - transformation: "models.Transformation", + transformation: "_models.Transformation", if_match: Optional[str] = None, - **kwargs - ) -> "models.Transformation": + **kwargs: Any + ) -> "_models.Transformation": """Updates an existing transformation under an existing streaming job. This can be used to partially update (ie. update one or two properties) a transformation without affecting the rest the job or transformation definition. @@ -167,7 +167,7 @@ async def update( :rtype: ~stream_analytics_management_client.models.Transformation :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Transformation"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Transformation"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -223,8 +223,8 @@ async def get( resource_group_name: str, job_name: str, transformation_name: str, - **kwargs - ) -> "models.Transformation": + **kwargs: Any + ) -> "_models.Transformation": """Gets details about the specified transformation. :param resource_group_name: The name of the resource group. The name is case insensitive. 
@@ -238,7 +238,7 @@ async def get( :rtype: ~stream_analytics_management_client.models.Transformation :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Transformation"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Transformation"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/__init__.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/__init__.py index bdaa063b8a5d..7b146f7a3fe5 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/__init__.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/__init__.py @@ -54,8 +54,8 @@ from ._models_py3 import Diagnostics from ._models_py3 import DocumentDbOutputDataSource from ._models_py3 import Error - from ._models_py3 import ErrorAutoGenerated from ._models_py3 import ErrorDetails + from ._models_py3 import ErrorError from ._models_py3 import ErrorResponse from ._models_py3 import EventHubDataSourceProperties from ._models_py3 import EventHubOutputDataSource @@ -97,10 +97,19 @@ from ._models_py3 import PrivateLinkConnectionState from ._models_py3 import PrivateLinkServiceConnection from ._models_py3 import ProxyResource + from ._models_py3 import QueryCompilationErrors + from ._models_py3 import QueryCompilationResult + from ._models_py3 import QueryTestingResult + from ._models_py3 import RawOutputDatasource + from ._models_py3 import RawReferenceInputDataSource + from ._models_py3 import RawStreamInputDataSource from ._models_py3 import ReferenceInputDataSource from ._models_py3 import ReferenceInputProperties from ._models_py3 import Resource from ._models_py3 import ResourceTestStatus + from ._models_py3 import SampleInput + from ._models_py3 import SampleInputResult + from ._models_py3 import SampleInputSerialization from ._models_py3 import ScalarFunctionProperties from ._models_py3 import Serialization from ._models_py3 import ServiceBusDataSourceProperties @@ -118,6 +127,9 @@ from ._models_py3 import SubResource from ._models_py3 import SubscriptionQuota from ._models_py3 import SubscriptionQuotasListResult + from ._models_py3 import TestDatasourceResult + from ._models_py3 import TestInput + from ._models_py3 import TestOutput from ._models_py3 import TrackedResource from ._models_py3 import Transformation except (SyntaxError, ImportError): @@ -168,8 +180,8 @@ from ._models import Diagnostics # type: ignore from ._models import DocumentDbOutputDataSource # type: ignore from ._models import Error # type: ignore - from ._models import ErrorAutoGenerated # type: ignore from ._models import ErrorDetails # type: ignore + from ._models import ErrorError # type: ignore from ._models import ErrorResponse # type: ignore from ._models import EventHubDataSourceProperties # type: ignore from ._models import EventHubOutputDataSource # type: ignore @@ -211,10 +223,19 @@ from ._models import PrivateLinkConnectionState # type: ignore from ._models import PrivateLinkServiceConnection # type: ignore from ._models import ProxyResource # type: ignore + from ._models import QueryCompilationErrors # type: ignore + from ._models import QueryCompilationResult # type: ignore + from ._models import QueryTestingResult # type: ignore + from ._models import RawOutputDatasource # type: ignore + from ._models import RawReferenceInputDataSource 
# type: ignore + from ._models import RawStreamInputDataSource # type: ignore from ._models import ReferenceInputDataSource # type: ignore from ._models import ReferenceInputProperties # type: ignore from ._models import Resource # type: ignore from ._models import ResourceTestStatus # type: ignore + from ._models import SampleInput # type: ignore + from ._models import SampleInputResult # type: ignore + from ._models import SampleInputSerialization # type: ignore from ._models import ScalarFunctionProperties # type: ignore from ._models import Serialization # type: ignore from ._models import ServiceBusDataSourceProperties # type: ignore @@ -232,6 +253,9 @@ from ._models import SubResource # type: ignore from ._models import SubscriptionQuota # type: ignore from ._models import SubscriptionQuotasListResult # type: ignore + from ._models import TestDatasourceResult # type: ignore + from ._models import TestInput # type: ignore + from ._models import TestOutput # type: ignore from ._models import TrackedResource # type: ignore from ._models import Transformation # type: ignore @@ -249,7 +273,10 @@ JsonOutputSerializationFormat, OutputErrorPolicy, OutputStartMode, + QueryTestingResultStatus, + SampleInputResultStatus, StreamingJobSkuName, + TestDatasourceResultStatus, ) __all__ = [ @@ -300,8 +327,8 @@ 'Diagnostics', 'DocumentDbOutputDataSource', 'Error', - 'ErrorAutoGenerated', 'ErrorDetails', + 'ErrorError', 'ErrorResponse', 'EventHubDataSourceProperties', 'EventHubOutputDataSource', @@ -343,10 +370,19 @@ 'PrivateLinkConnectionState', 'PrivateLinkServiceConnection', 'ProxyResource', + 'QueryCompilationErrors', + 'QueryCompilationResult', + 'QueryTestingResult', + 'RawOutputDatasource', + 'RawReferenceInputDataSource', + 'RawStreamInputDataSource', 'ReferenceInputDataSource', 'ReferenceInputProperties', 'Resource', 'ResourceTestStatus', + 'SampleInput', + 'SampleInputResult', + 'SampleInputSerialization', 'ScalarFunctionProperties', 'Serialization', 'ServiceBusDataSourceProperties', @@ -364,6 +400,9 @@ 'SubResource', 'SubscriptionQuota', 'SubscriptionQuotasListResult', + 'TestDatasourceResult', + 'TestInput', + 'TestOutput', 'TrackedResource', 'Transformation', 'AuthenticationMode', @@ -379,5 +418,8 @@ 'JsonOutputSerializationFormat', 'OutputErrorPolicy', 'OutputStartMode', + 'QueryTestingResultStatus', + 'SampleInputResultStatus', 'StreamingJobSkuName', + 'TestDatasourceResultStatus', ] diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models.py index 100ff571855a..c7a9503fc12b 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models.py @@ -23,8 +23,8 @@ class FunctionProperties(msrest.serialization.Model): :param type: Required. Indicates the type of function.Constant filled by server. :type type: str :ivar etag: The current entity tag for the function. This is an opaque string. You can use it - to detect whether the resource has changed between requests. You can also use it in the If- - Match or If-None-Match headers for write operations for optimistic concurrency. + to detect whether the resource has changed between requests. You can also use it in the + If-Match or If-None-Match headers for write operations for optimistic concurrency. 
:vartype etag: str :param inputs: :type inputs: list[~stream_analytics_management_client.models.FunctionInput] @@ -74,8 +74,8 @@ class AggregateFunctionProperties(FunctionProperties): :param type: Required. Indicates the type of function.Constant filled by server. :type type: str :ivar etag: The current entity tag for the function. This is an opaque string. You can use it - to detect whether the resource has changed between requests. You can also use it in the If- - Match or If-None-Match headers for write operations for optimistic concurrency. + to detect whether the resource has changed between requests. You can also use it in the + If-Match or If-None-Match headers for write operations for optimistic concurrency. :vartype etag: str :param inputs: :type inputs: list[~stream_analytics_management_client.models.FunctionInput] @@ -152,7 +152,7 @@ class AvroSerialization(Serialization): :type type: str or ~stream_analytics_management_client.models.EventSerializationType :param properties: The properties that are associated with the Avro serialization type. Required on PUT (CreateOrReplace) requests. - :type properties: object + :type properties: any """ _validation = { @@ -177,7 +177,7 @@ class OutputDataSource(msrest.serialization.Model): """Describes the data source that output will be written to. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureFunctionOutputDataSource, AzureDataLakeStoreOutputDataSource, EventHubV2OutputDataSource, EventHubOutputDataSource, ServiceBusQueueOutputDataSource, ServiceBusTopicOutputDataSource, AzureSynapseOutputDataSource, AzureSqlDatabaseOutputDataSource, BlobOutputDataSource, DocumentDbOutputDataSource, AzureTableOutputDataSource, PowerBIOutputDataSource. + sub-classes are: AzureFunctionOutputDataSource, AzureDataLakeStoreOutputDataSource, EventHubV2OutputDataSource, EventHubOutputDataSource, ServiceBusQueueOutputDataSource, ServiceBusTopicOutputDataSource, AzureSynapseOutputDataSource, AzureSqlDatabaseOutputDataSource, BlobOutputDataSource, DocumentDbOutputDataSource, AzureTableOutputDataSource, PowerBIOutputDataSource, RawOutputDatasource. All required parameters must be populated in order to send to Azure. 
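
The known sub-classes list above now includes RawOutputDatasource, and the next hunk maps it to the 'Raw' discriminator. As a minimal sketch of what that wiring does, assuming this regenerated azure-mgmt-streamanalytics build is installed (the SAS URLs below are placeholders, not values from this diff):

    from azure.mgmt.streamanalytics.models import OutputDataSource, RawOutputDatasource

    # Constructing the sub-class directly: the constructor fills in the 'Raw' constant.
    raw = RawOutputDatasource(payload_uri="https://example.blob.core.windows.net/out?<sas-token>")
    print(raw.type)  # 'Raw'

    # Deserializing through the base class: msrest consults _subtype_map to pick
    # RawOutputDatasource whenever the wire payload's 'type' field is 'Raw'.
    wire = {"type": "Raw", "properties": {"payloadUri": "https://example/<sas-token>"}}
    parsed = OutputDataSource.deserialize(wire)
    print(type(parsed).__name__)  # RawOutputDatasource
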
@@ -195,7 +195,7 @@ class OutputDataSource(msrest.serialization.Model): } _subtype_map = { - 'type': {'Microsoft.AzureFunction': 'AzureFunctionOutputDataSource', 'Microsoft.DataLake/Accounts': 'AzureDataLakeStoreOutputDataSource', 'Microsoft.EventHub/EventHub': 'EventHubV2OutputDataSource', 'Microsoft.ServiceBus/EventHub': 'EventHubOutputDataSource', 'Microsoft.ServiceBus/Queue': 'ServiceBusQueueOutputDataSource', 'Microsoft.ServiceBus/Topic': 'ServiceBusTopicOutputDataSource', 'Microsoft.Sql/Server/DataWarehouse': 'AzureSynapseOutputDataSource', 'Microsoft.Sql/Server/Database': 'AzureSqlDatabaseOutputDataSource', 'Microsoft.Storage/Blob': 'BlobOutputDataSource', 'Microsoft.Storage/DocumentDB': 'DocumentDbOutputDataSource', 'Microsoft.Storage/Table': 'AzureTableOutputDataSource', 'PowerBI': 'PowerBIOutputDataSource'} + 'type': {'Microsoft.AzureFunction': 'AzureFunctionOutputDataSource', 'Microsoft.DataLake/Accounts': 'AzureDataLakeStoreOutputDataSource', 'Microsoft.EventHub/EventHub': 'EventHubV2OutputDataSource', 'Microsoft.ServiceBus/EventHub': 'EventHubOutputDataSource', 'Microsoft.ServiceBus/Queue': 'ServiceBusQueueOutputDataSource', 'Microsoft.ServiceBus/Topic': 'ServiceBusTopicOutputDataSource', 'Microsoft.Sql/Server/DataWarehouse': 'AzureSynapseOutputDataSource', 'Microsoft.Sql/Server/Database': 'AzureSqlDatabaseOutputDataSource', 'Microsoft.Storage/Blob': 'BlobOutputDataSource', 'Microsoft.Storage/DocumentDB': 'DocumentDbOutputDataSource', 'Microsoft.Storage/Table': 'AzureTableOutputDataSource', 'PowerBI': 'PowerBIOutputDataSource', 'Raw': 'RawOutputDatasource'} } def __init__( @@ -554,8 +554,6 @@ def __init__( class AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): """The parameters needed to retrieve the default function definition for an Azure Machine Learning web service function. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param binding_type: Required. Indicates the function binding type.Constant filled by server. @@ -563,13 +561,13 @@ class AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters(Fun :param execute_endpoint: The Request-Response execute endpoint of the Azure Machine Learning web service. :type execute_endpoint: str - :ivar udf_type: The function type. Default value: "Scalar". - :vartype udf_type: str + :param udf_type: The function type. The only acceptable values to pass in are None and + "Scalar". The default value is None. + :type udf_type: str """ _validation = { 'binding_type': {'required': True}, - 'udf_type': {'constant': True}, } _attribute_map = { @@ -578,8 +576,6 @@ class AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters(Fun 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'str'}, } - udf_type = "Scalar" - def __init__( self, **kwargs @@ -587,6 +583,7 @@ def __init__( super(AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) self.binding_type = 'Microsoft.MachineLearningServices' # type: str self.execute_endpoint = kwargs.get('execute_endpoint', None) + self.udf_type = kwargs.get('udf_type', None) class AzureMachineLearningServiceInputColumn(msrest.serialization.Model): @@ -676,8 +673,8 @@ class AzureMachineLearningStudioFunctionBinding(FunctionBinding): :param type: Required. Indicates the function binding type.Constant filled by server. 
:type type: str :param endpoint: The Request-Response execute endpoint of the Azure Machine Learning Studio. - Find out more here: https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning- - consume-web-services#request-response-service-rrs. + Find out more here: + https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs. :type endpoint: str :param api_key: The API key used to authenticate with Request-Response endpoint. :type api_key: str @@ -720,23 +717,21 @@ def __init__( class AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): """The parameters needed to retrieve the default function definition for an Azure Machine Learning Studio function. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param binding_type: Required. Indicates the function binding type.Constant filled by server. :type binding_type: str :param execute_endpoint: The Request-Response execute endpoint of the Azure Machine Learning - Studio. Find out more here: https://docs.microsoft.com/en-us/azure/machine-learning/machine- - learning-consume-web-services#request-response-service-rrs. + Studio. Find out more here: + https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs. :type execute_endpoint: str - :ivar udf_type: The function type. Default value: "Scalar". - :vartype udf_type: str + :param udf_type: The function type. The only acceptable values to pass in are None and + "Scalar". The default value is None. + :type udf_type: str """ _validation = { 'binding_type': {'required': True}, - 'udf_type': {'constant': True}, } _attribute_map = { @@ -745,8 +740,6 @@ class AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters(Func 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'str'}, } - udf_type = "Scalar" - def __init__( self, **kwargs @@ -754,6 +747,7 @@ def __init__( super(AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) self.binding_type = 'Microsoft.MachineLearning/WebService' # type: str self.execute_endpoint = kwargs.get('execute_endpoint', None) + self.udf_type = kwargs.get('udf_type', None) class AzureMachineLearningStudioInputColumn(msrest.serialization.Model): @@ -762,8 +756,8 @@ class AzureMachineLearningStudioInputColumn(msrest.serialization.Model): :param name: The name of the input column. :type name: str :param data_type: The (Azure Machine Learning supported) data type of the input column. A list - of valid Azure Machine Learning data types are described at https://msdn.microsoft.com/en- - us/library/azure/dn905923.aspx . + of valid Azure Machine Learning data types are described at + https://msdn.microsoft.com/en-us/library/azure/dn905923.aspx . :type data_type: str :param map_to: The zero based index of the function parameter this input maps to. :type map_to: int @@ -815,8 +809,8 @@ class AzureMachineLearningStudioOutputColumn(msrest.serialization.Model): :param name: The name of the output column. :type name: str :param data_type: The (Azure Machine Learning supported) data type of the output column. A list - of valid Azure Machine Learning data types are described at https://msdn.microsoft.com/en- - us/library/azure/dn905923.aspx . 
+ of valid Azure Machine Learning data types are described at + https://msdn.microsoft.com/en-us/library/azure/dn905923.aspx . :type data_type: str """ @@ -1006,7 +1000,7 @@ class ReferenceInputDataSource(msrest.serialization.Model): """Describes an input data source that contains reference data. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureSqlReferenceInputDataSource, BlobReferenceInputDataSource. + sub-classes are: AzureSqlReferenceInputDataSource, BlobReferenceInputDataSource, RawReferenceInputDataSource. All required parameters must be populated in order to send to Azure. @@ -1024,7 +1018,7 @@ class ReferenceInputDataSource(msrest.serialization.Model): } _subtype_map = { - 'type': {'Microsoft.Sql/Server/Database': 'AzureSqlReferenceInputDataSource', 'Microsoft.Storage/Blob': 'BlobReferenceInputDataSource'} + 'type': {'Microsoft.Sql/Server/Database': 'AzureSqlReferenceInputDataSource', 'Microsoft.Storage/Blob': 'BlobReferenceInputDataSource', 'Raw': 'RawReferenceInputDataSource'} } def __init__( @@ -1327,10 +1321,10 @@ class BlobDataSourceProperties(msrest.serialization.Model): :type container: str :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. :type path_pattern: str :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. @@ -1377,10 +1371,10 @@ class BlobOutputDataSource(OutputDataSource): :type container: str :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. :type path_pattern: str :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. @@ -1433,10 +1427,10 @@ class BlobOutputDataSourceProperties(BlobDataSourceProperties): :type container: str :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. 
+ input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. :type path_pattern: str :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. @@ -1483,10 +1477,10 @@ class BlobReferenceInputDataSource(ReferenceInputDataSource): :type container: str :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. :type path_pattern: str :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. @@ -1534,10 +1528,10 @@ class BlobReferenceInputDataSourceProperties(BlobDataSourceProperties): :type container: str :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. :type path_pattern: str :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. @@ -1566,7 +1560,7 @@ class StreamInputDataSource(msrest.serialization.Model): """Describes an input data source that contains stream data. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: IoTHubStreamInputDataSource, EventHubV2StreamInputDataSource, EventHubStreamInputDataSource, BlobStreamInputDataSource. + sub-classes are: IoTHubStreamInputDataSource, EventHubV2StreamInputDataSource, EventHubStreamInputDataSource, BlobStreamInputDataSource, RawStreamInputDataSource. All required parameters must be populated in order to send to Azure. 
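
The rewrapped path_pattern docstrings above describe the {date} and {time} tokens that date_format and time_format substitute into. A minimal sketch of wiring them on a blob stream input, assuming the regenerated package; the storage account name and key are placeholders, and time_format is assumed from the blob data source model rather than shown in these hunks:

    from azure.mgmt.streamanalytics.models import BlobStreamInputDataSource, StorageAccount

    # Placeholder credentials; blobs under input-events/cluster1/<date>/<hour> will match.
    blob_input = BlobStreamInputDataSource(
        storage_accounts=[StorageAccount(account_name="examplestore", account_key="<key>")],
        container="input-events",
        path_pattern="cluster1/{date}/{time}",
        date_format="yyyy/MM/dd",
        time_format="HH",
    )
    print(blob_input.type)  # 'Microsoft.Storage/Blob'
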
@@ -1584,7 +1578,7 @@ class StreamInputDataSource(msrest.serialization.Model): } _subtype_map = { - 'type': {'Microsoft.Devices/IotHubs': 'IoTHubStreamInputDataSource', 'Microsoft.EventHub/EventHub': 'EventHubV2StreamInputDataSource', 'Microsoft.ServiceBus/EventHub': 'EventHubStreamInputDataSource', 'Microsoft.Storage/Blob': 'BlobStreamInputDataSource'} + 'type': {'Microsoft.Devices/IotHubs': 'IoTHubStreamInputDataSource', 'Microsoft.EventHub/EventHub': 'EventHubV2StreamInputDataSource', 'Microsoft.ServiceBus/EventHub': 'EventHubStreamInputDataSource', 'Microsoft.Storage/Blob': 'BlobStreamInputDataSource', 'Raw': 'RawStreamInputDataSource'} } def __init__( @@ -1612,10 +1606,10 @@ class BlobStreamInputDataSource(StreamInputDataSource): :type container: str :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. :type path_pattern: str :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. @@ -1668,10 +1662,10 @@ class BlobStreamInputDataSourceProperties(BlobDataSourceProperties): :type container: str :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. :type path_pattern: str :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. @@ -1702,7 +1696,7 @@ def __init__( class Resource(msrest.serialization.Model): - """Resource. + """The base resource definition. Variables are only populated by the server, and will be ignored when sending a request. @@ -2101,21 +2095,19 @@ def __init__( class CSharpFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): """The parameters needed to retrieve the default function definition for a CSharp function. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param binding_type: Required. Indicates the function binding type.Constant filled by server. :type binding_type: str :param script: The CSharp code containing a single function definition. :type script: str - :ivar udf_type: The function type. Default value: "Scalar". - :vartype udf_type: str + :param udf_type: The function type. 
The only acceptable values to pass in are None and + "Scalar". The default value is None. + :type udf_type: str """ _validation = { 'binding_type': {'required': True}, - 'udf_type': {'constant': True}, } _attribute_map = { @@ -2124,8 +2116,6 @@ class CSharpFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultD 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'str'}, } - udf_type = "Scalar" - def __init__( self, **kwargs @@ -2133,6 +2123,7 @@ def __init__( super(CSharpFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) self.binding_type = 'Microsoft.StreamAnalytics/CLRUdf' # type: str self.script = kwargs.get('script', None) + self.udf_type = kwargs.get('udf_type', None) class CsvSerialization(Serialization): @@ -2145,9 +2136,10 @@ class CsvSerialization(Serialization): "Csv", "Avro", "Json", "CustomClr", "Parquet". :type type: str or ~stream_analytics_management_client.models.EventSerializationType :param field_delimiter: Specifies the delimiter that will be used to separate comma-separated - value (CSV) records. See https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream- - analytics-input or https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics- - output for a list of supported values. Required on PUT (CreateOrReplace) requests. + value (CSV) records. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a list of + supported values. Required on PUT (CreateOrReplace) requests. :type field_delimiter: str :param encoding: Specifies the encoding of the incoming data in the case of input and the encoding of outgoing data in the case of output. Required on PUT (CreateOrReplace) requests. @@ -2292,9 +2284,9 @@ class DocumentDbOutputDataSource(OutputDataSource): :type database: str :param collection_name_pattern: The collection name pattern for the collections to be used. The collection name format can be constructed using the optional {partition} token, where - partitions start from 0. See the DocumentDB section of https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for more information. Required on PUT - (CreateOrReplace) requests. + partitions start from 0. See the DocumentDB section of + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for more + information. Required on PUT (CreateOrReplace) requests. :type collection_name_pattern: str :param partition_key: The name of the field in output events used to specify the key for partitioning output across collections. If 'collectionNamePattern' contains the {partition} @@ -2337,11 +2329,11 @@ class Error(msrest.serialization.Model): """Common error representation. :param error: Error definition properties. - :type error: ~stream_analytics_management_client.models.ErrorAutoGenerated + :type error: ~stream_analytics_management_client.models.ErrorError """ _attribute_map = { - 'error': {'key': 'error', 'type': 'ErrorAutoGenerated'}, + 'error': {'key': 'error', 'type': 'ErrorError'}, } def __init__( @@ -2352,62 +2344,62 @@ def __init__( self.error = kwargs.get('error', None) -class ErrorAutoGenerated(msrest.serialization.Model): - """Error definition properties. +class ErrorDetails(msrest.serialization.Model): + """Common error details representation. :param code: Error code. :type code: str - :param message: Error message. - :type message: str :param target: Error target. 
:type target: str - :param details: Error details. - :type details: list[~stream_analytics_management_client.models.ErrorDetails] + :param message: Error message. + :type message: str """ _attribute_map = { 'code': {'key': 'code', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, 'target': {'key': 'target', 'type': 'str'}, - 'details': {'key': 'details', 'type': '[ErrorDetails]'}, + 'message': {'key': 'message', 'type': 'str'}, } def __init__( self, **kwargs ): - super(ErrorAutoGenerated, self).__init__(**kwargs) + super(ErrorDetails, self).__init__(**kwargs) self.code = kwargs.get('code', None) - self.message = kwargs.get('message', None) self.target = kwargs.get('target', None) - self.details = kwargs.get('details', None) + self.message = kwargs.get('message', None) -class ErrorDetails(msrest.serialization.Model): - """Common error details representation. +class ErrorError(msrest.serialization.Model): + """Error definition properties. :param code: Error code. :type code: str - :param target: Error target. - :type target: str :param message: Error message. :type message: str + :param target: Error target. + :type target: str + :param details: Error details. + :type details: list[~stream_analytics_management_client.models.ErrorDetails] """ _attribute_map = { 'code': {'key': 'code', 'type': 'str'}, - 'target': {'key': 'target', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[ErrorDetails]'}, } def __init__( self, **kwargs ): - super(ErrorDetails, self).__init__(**kwargs) + super(ErrorError, self).__init__(**kwargs) self.code = kwargs.get('code', None) - self.target = kwargs.get('target', None) self.message = kwargs.get('message', None) + self.target = kwargs.get('target', None) + self.details = kwargs.get('details', None) class ErrorResponse(msrest.serialization.Model): @@ -2982,8 +2974,8 @@ class FunctionOutput(msrest.serialization.Model): """Describes the output of a function. :param data_type: The (Azure Stream Analytics supported) data type of the function output. A - list of valid Azure Stream Analytics data types are described at https://msdn.microsoft.com/en- - us/library/azure/dn835065.aspx. + list of valid Azure Stream Analytics data types are described at + https://msdn.microsoft.com/en-us/library/azure/dn835065.aspx. :type data_type: str """ @@ -3239,8 +3231,6 @@ def __init__( class JavaScriptFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): """The parameters needed to retrieve the default function definition for a JavaScript function. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param binding_type: Required. Indicates the function binding type.Constant filled by server. @@ -3248,13 +3238,13 @@ class JavaScriptFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefa :param script: The JavaScript code containing a single function definition. For example: 'function (x, y) { return x + y; }'. :type script: str - :ivar udf_type: The function type. Default value: "Scalar". - :vartype udf_type: str + :param udf_type: The function type. The only acceptable values to pass in are None and + "Scalar". The default value is None. 
+ :type udf_type: str
 """

 _validation = {
 'binding_type': {'required': True},
- 'udf_type': {'constant': True},
 }

 _attribute_map = {
@@ -3263,8 +3253,6 @@ class JavaScriptFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefa
 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'str'},
 }

- udf_type = "Scalar"
-
 def __init__(
 self,
 **kwargs
@@ -3272,6 +3260,7 @@ def __init__(
 super(JavaScriptFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs)
 self.binding_type = 'Microsoft.StreamAnalytics/JavascriptUdf' # type: str
 self.script = kwargs.get('script', None)
+ self.udf_type = kwargs.get('udf_type', None)


 class StorageAccount(msrest.serialization.Model):
@@ -3575,7 +3564,7 @@ class ParquetSerialization(Serialization):
 :type type: str or ~stream_analytics_management_client.models.EventSerializationType
 :param properties: The properties that are associated with the Parquet serialization type.
 Required on PUT (CreateOrReplace) requests.
- :type properties: object
+ :type properties: any
 """

 _validation = {
@@ -3722,7 +3711,41 @@ def __init__(
 self.authentication_mode = kwargs.get('authentication_mode', None)


-class PrivateEndpoint(Resource):
+class ProxyResource(Resource):
+ """The resource model definition for an ARM proxy resource. It will have everything other than the required location and tags.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Fully qualified resource Id for the resource. Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or
+ Microsoft.Storage/storageAccounts.
+ :vartype type: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ProxyResource, self).__init__(**kwargs)
+
+
+class PrivateEndpoint(ProxyResource):
 """Complete information about the private endpoint.

 Variables are only populated by the server, and will be ignored when sending a request.
@@ -3901,38 +3924,240 @@ def __init__(
 self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None)


-class ProxyResource(Resource):
- """The resource model definition for a ARM proxy resource. It will have everything other than required location and tags.
+class QueryCompilationErrors(msrest.serialization.Model):
+ """The errors produced by the compiler.

 Variables are only populated by the server, and will be ignored when sending a request.

- :ivar id: Fully qualified resource Id for the resource. Ex -
- /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
- :vartype id: str
- :ivar name: The name of the resource.
- :vartype name: str
- :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or
- Microsoft.Storage/storageAccounts.
- :vartype type: str
+ :ivar message: The content of the error message.
+ :vartype message: str
+ :ivar start_line: Describes the error location in the original query. Not set if isGlobal is
+ true.
+ :vartype start_line: int + :ivar start_column: Describes the error location in the original query. Not set if isGlobal is + true. + :vartype start_column: int + :ivar end_line: Describes the error location in the original query. Not set if isGlobal is + true. + :vartype end_line: int + :ivar end_column: Describes the error location in the original query. Not set if isGlobal is + true. + :vartype end_column: int + :ivar is_global: Whether the error is not for a specific part but for the entire query. + :vartype is_global: bool """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, + 'message': {'readonly': True}, + 'start_line': {'readonly': True}, + 'start_column': {'readonly': True}, + 'end_line': {'readonly': True}, + 'end_column': {'readonly': True}, + 'is_global': {'readonly': True}, + } + + _attribute_map = { + 'message': {'key': 'message', 'type': 'str'}, + 'start_line': {'key': 'startLine', 'type': 'int'}, + 'start_column': {'key': 'startColumn', 'type': 'int'}, + 'end_line': {'key': 'endLine', 'type': 'int'}, + 'end_column': {'key': 'endColumn', 'type': 'int'}, + 'is_global': {'key': 'isGlobal', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(QueryCompilationErrors, self).__init__(**kwargs) + self.message = None + self.start_line = None + self.start_column = None + self.end_line = None + self.end_column = None + self.is_global = None + + +class QueryCompilationResult(msrest.serialization.Model): + """The result of the query compilation request. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param errors: The errors produced by the compiler. + :type errors: ~stream_analytics_management_client.models.QueryCompilationErrors + :ivar warnings: Warning messages produced by the compiler. + :vartype warnings: list[str] + :ivar inputs: All input names used by the query. + :vartype inputs: list[str] + :ivar outputs: All output names used by the query. + :vartype outputs: list[str] + :ivar functions: All function names used by the query. + :vartype functions: list[str] + """ + + _validation = { + 'warnings': {'readonly': True}, + 'inputs': {'readonly': True}, + 'outputs': {'readonly': True}, + 'functions': {'readonly': True}, + } + + _attribute_map = { + 'errors': {'key': 'errors', 'type': 'QueryCompilationErrors'}, + 'warnings': {'key': 'warnings', 'type': '[str]'}, + 'inputs': {'key': 'inputs', 'type': '[str]'}, + 'outputs': {'key': 'outputs', 'type': '[str]'}, + 'functions': {'key': 'functions', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(QueryCompilationResult, self).__init__(**kwargs) + self.errors = kwargs.get('errors', None) + self.warnings = None + self.inputs = None + self.outputs = None + self.functions = None + + +class QueryTestingResult(Error): + """The result of the query testing request. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param error: Error definition properties. + :type error: ~stream_analytics_management_client.models.ErrorError + :ivar status: The status of the query testing request. Possible values include: "Started", + "Success", "CompilerError", "RuntimeError", "Timeout", "UnknownError". + :vartype status: str or ~stream_analytics_management_client.models.QueryTestingResultStatus + :ivar output_uri: The SAS URL to the outputs payload. 
+ :vartype output_uri: str + """ + + _validation = { + 'status': {'readonly': True}, + 'output_uri': {'readonly': True}, + } + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorError'}, + 'status': {'key': 'status', 'type': 'str'}, + 'output_uri': {'key': 'outputUri', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(QueryTestingResult, self).__init__(**kwargs) + self.status = None + self.output_uri = None + + +class RawOutputDatasource(OutputDataSource): + """Describes a raw output data source. This data source type is only applicable/usable when using the query testing API. You cannot create a job with this data source type or add an output of this data source type to an existing job. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of data source output will be written to. Required on + PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param payload_uri: The SAS URL to a blob where the output should be written. If this property + is not set, output data will be written into a temporary storage, and a SAS URL to that + temporary storage will be included in the result. + :type payload_uri: str + """ + + _validation = { + 'type': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, + 'payload_uri': {'key': 'properties.payloadUri', 'type': 'str'}, } def __init__( self, **kwargs ): - super(ProxyResource, self).__init__(**kwargs) + super(RawOutputDatasource, self).__init__(**kwargs) + self.type = 'Raw' # type: str + self.payload_uri = kwargs.get('payload_uri', None) + + +class RawReferenceInputDataSource(ReferenceInputDataSource): + """Describes a raw input data source that contains reference data. This data source type is only applicable/usable when using the query testing API. You cannot create a job with this data source type or add an input of this data source type to an existing job. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of input data source containing reference data. + Required on PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param payload: The JSON serialized content of the input data. Either payload or payloadUri + must be set, but not both. + :type payload: str + :param payload_uri: The SAS URL to a blob containing the JSON serialized content of the input + data. Either payload or payloadUri must be set, but not both. + :type payload_uri: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'payload': {'key': 'properties.payload', 'type': 'str'}, + 'payload_uri': {'key': 'properties.payloadUri', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(RawReferenceInputDataSource, self).__init__(**kwargs) + self.type = 'Raw' # type: str + self.payload = kwargs.get('payload', None) + self.payload_uri = kwargs.get('payload_uri', None) + + +class RawStreamInputDataSource(StreamInputDataSource): + """Describes a raw input data source that contains stream data. This data source type is only applicable/usable when using the query testing API. You cannot create a job with this data source type or add an input of this data source type to an existing job. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. 
Indicates the type of input data source containing stream data. Required
+ on PUT (CreateOrReplace) requests.Constant filled by server.
+ :type type: str
+ :param payload: The JSON serialized content of the input data. Either payload or payloadUri
+ must be set, but not both.
+ :type payload: str
+ :param payload_uri: The SAS URL to a blob containing the JSON serialized content of the input
+ data. Either payload or payloadUri must be set, but not both.
+ :type payload_uri: str
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'payload': {'key': 'properties.payload', 'type': 'str'},
+ 'payload_uri': {'key': 'properties.payloadUri', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(RawStreamInputDataSource, self).__init__(**kwargs)
+ self.type = 'Raw' # type: str
+ self.payload = kwargs.get('payload', None)
+ self.payload_uri = kwargs.get('payload_uri', None)


 class ReferenceInputProperties(InputProperties):
@@ -4020,6 +4245,134 @@ def __init__(
 self.error = None


+class SampleInput(msrest.serialization.Model):
+ """The stream analytics input to sample.
+
+ :param input: The stream analytics input to sample.
+ :type input: ~stream_analytics_management_client.models.Input
+ :param event_start_time: The start time for the data to sample. This is in DateTime format.
+ :type event_start_time: str
+ :param event_end_time: The end time for the data to sample. This is also in DateTime format.
+ :type event_end_time: str
+ :param serialization: The serialization format for the sampled output data saved to storage
+ (not for the input format).
+ :type serialization: ~stream_analytics_management_client.models.SampleInputSerialization
+ :param number_of_events: The max number of events to read. Defaults to 1000.
+ :type number_of_events: int
+ :param max_time_to_read_in_seconds: The max time, in seconds, the service should wait to read
+ events. Defaults to 30. If this time is reached, the service will stop reading and return
+ whatever data has been read. However, this does not apply when no data is read, in which case
+ the service waits for a maximum of 90 seconds.
+ :type max_time_to_read_in_seconds: int
+ :param partition_count: When the caller already knows the input partition count, passing in
+ this value accelerates the operation because the service does not have to fetch the partition
+ count itself.
+ :type partition_count: int
+ :param compatibility_level: Defaults to the default ASA job compatibility level, which is
+ currently 1.2.
+ :type compatibility_level: str
+ :param events_url: The SAS URI of the storage blob for the service to write the sampled events
+ to. If this parameter is not provided, the service will write events to the system account and
+ share a temporary SAS URI to it.
+ :type events_url: str
+ :param data_locale: Defaults to en-US.
+ :type data_locale: str + """ + + _attribute_map = { + 'input': {'key': 'input', 'type': 'Input'}, + 'event_start_time': {'key': 'eventStartTime', 'type': 'str'}, + 'event_end_time': {'key': 'eventEndTime', 'type': 'str'}, + 'serialization': {'key': 'serialization', 'type': 'SampleInputSerialization'}, + 'number_of_events': {'key': 'numberOfEvents', 'type': 'int'}, + 'max_time_to_read_in_seconds': {'key': 'maxTimeToReadInSeconds', 'type': 'int'}, + 'partition_count': {'key': 'partitionCount', 'type': 'int'}, + 'compatibility_level': {'key': 'compatibilityLevel', 'type': 'str'}, + 'events_url': {'key': 'eventsUrl', 'type': 'str'}, + 'data_locale': {'key': 'dataLocale', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SampleInput, self).__init__(**kwargs) + self.input = kwargs.get('input', None) + self.event_start_time = kwargs.get('event_start_time', None) + self.event_end_time = kwargs.get('event_end_time', None) + self.serialization = kwargs.get('serialization', None) + self.number_of_events = kwargs.get('number_of_events', None) + self.max_time_to_read_in_seconds = kwargs.get('max_time_to_read_in_seconds', None) + self.partition_count = kwargs.get('partition_count', None) + self.compatibility_level = kwargs.get('compatibility_level', None) + self.events_url = kwargs.get('events_url', None) + self.data_locale = kwargs.get('data_locale', None) + + +class SampleInputResult(Error): + """The result of the sample input request. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param error: Error definition properties. + :type error: ~stream_analytics_management_client.models.ErrorError + :ivar status: The status of the sample input request. Possible values include: + "ReadAllEventsInRange", "NoEventsFoundInRange", "ErrorConnectingToInput". + :vartype status: str or ~stream_analytics_management_client.models.SampleInputResultStatus + :ivar diagnostics: Diagnostics messages. E.g. message indicating some partitions from the input + have no data. + :vartype diagnostics: list[str] + :ivar events_download_url: A SAS URL to download the sampled input data. + :vartype events_download_url: list[str] + :ivar last_arrival_time: The timestamp for the last event in the data. It is in DateTime + format. + :vartype last_arrival_time: str + """ + + _validation = { + 'status': {'readonly': True}, + 'diagnostics': {'readonly': True}, + 'events_download_url': {'readonly': True}, + 'last_arrival_time': {'readonly': True}, + } + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorError'}, + 'status': {'key': 'status', 'type': 'str'}, + 'diagnostics': {'key': 'diagnostics', 'type': '[str]'}, + 'events_download_url': {'key': 'eventsDownloadUrl', 'type': '[str]'}, + 'last_arrival_time': {'key': 'lastArrivalTime', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SampleInputResult, self).__init__(**kwargs) + self.status = None + self.diagnostics = None + self.events_download_url = None + self.last_arrival_time = None + + +class SampleInputSerialization(msrest.serialization.Model): + """The serialization format for the sampled output data saved to the storage (not for the input format). + + :param type: The serialization format. 
+ :type type: str + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SampleInputSerialization, self).__init__(**kwargs) + self.type = kwargs.get('type', None) + + class ScalarFunctionProperties(FunctionProperties): """The properties that are associated with a scalar function. @@ -4030,8 +4383,8 @@ class ScalarFunctionProperties(FunctionProperties): :param type: Required. Indicates the type of function.Constant filled by server. :type type: str :ivar etag: The current entity tag for the function. This is an opaque string. You can use it - to detect whether the resource has changed between requests. You can also use it in the If- - Match or If-None-Match headers for write operations for optimistic concurrency. + to detect whether the resource has changed between requests. You can also use it in the + If-Match or If-None-Match headers for write operations for optimistic concurrency. :vartype etag: str :param inputs: :type inputs: list[~stream_analytics_management_client.models.FunctionInput] @@ -4369,9 +4722,9 @@ class StreamingJob(TrackedResource): have a value of -1. :type events_late_arrival_max_delay_in_seconds: int :param data_locale: The data locale of the stream analytics job. Value should be the name of a - supported .NET Culture from the set https://msdn.microsoft.com/en- - us/library/system.globalization.culturetypes(v=vs.110).aspx. Defaults to 'en-US' if none - specified. + supported .NET Culture from the set + https://msdn.microsoft.com/en-us/library/system.globalization.culturetypes(v=vs.110).aspx. + Defaults to 'en-US' if none specified. :type data_locale: str :param compatibility_level: Controls certain runtime behaviors of the streaming job. Possible values include: "1.0". @@ -4398,16 +4751,16 @@ class StreamingJob(TrackedResource): transformation. :type functions: list[~stream_analytics_management_client.models.Function] :ivar etag: The current entity tag for the streaming job. This is an opaque string. You can use - it to detect whether the resource has changed between requests. You can also use it in the If- - Match or If-None-Match headers for write operations for optimistic concurrency. + it to detect whether the resource has changed between requests. You can also use it in the + If-Match or If-None-Match headers for write operations for optimistic concurrency. :vartype etag: str :param job_storage_account: The properties that are associated with an Azure Storage account with MSI. :type job_storage_account: ~stream_analytics_management_client.models.JobStorageAccount - :ivar content_storage_policy: Valid values are JobStorageAccount and SystemAccount. If set to + :param content_storage_policy: Valid values are JobStorageAccount and SystemAccount. If set to JobStorageAccount, this requires the user to also specify jobStorageAccount property. . Possible values include: "SystemAccount", "JobStorageAccount". - :vartype content_storage_policy: str or + :type content_storage_policy: str or ~stream_analytics_management_client.models.ContentStoragePolicy :param externals: The storage account where the custom code artifacts are located. 
:type externals: ~stream_analytics_management_client.models.External @@ -4425,7 +4778,6 @@ class StreamingJob(TrackedResource): 'last_output_event_time': {'readonly': True}, 'created_date': {'readonly': True}, 'etag': {'readonly': True}, - 'content_storage_policy': {'readonly': True}, } _attribute_map = { @@ -4488,7 +4840,7 @@ def __init__( self.functions = kwargs.get('functions', None) self.etag = None self.job_storage_account = kwargs.get('job_storage_account', None) - self.content_storage_policy = None + self.content_storage_policy = kwargs.get('content_storage_policy', None) self.externals = kwargs.get('externals', None) self.cluster = kwargs.get('cluster', None) @@ -4664,6 +5016,73 @@ def __init__( self.value = None +class TestDatasourceResult(Error): + """The result of the test input or output request. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param error: Error definition properties. + :type error: ~stream_analytics_management_client.models.ErrorError + :ivar status: The status of the sample output request. Possible values include: + "TestSucceeded", "TestFailed". + :vartype status: str or ~stream_analytics_management_client.models.TestDatasourceResultStatus + """ + + _validation = { + 'status': {'readonly': True}, + } + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorError'}, + 'status': {'key': 'status', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(TestDatasourceResult, self).__init__(**kwargs) + self.status = None + + +class TestInput(msrest.serialization.Model): + """A stream analytics input object. + + :param input: The stream analytics input to test. + :type input: ~stream_analytics_management_client.models.Input + """ + + _attribute_map = { + 'input': {'key': 'input', 'type': 'Input'}, + } + + def __init__( + self, + **kwargs + ): + super(TestInput, self).__init__(**kwargs) + self.input = kwargs.get('input', None) + + +class TestOutput(msrest.serialization.Model): + """A stream analytics output object. + + :param output: The stream analytics output to test. + :type output: ~stream_analytics_management_client.models.Output + """ + + _attribute_map = { + 'output': {'key': 'output', 'type': 'Output'}, + } + + def __init__( + self, + **kwargs + ): + super(TestOutput, self).__init__(**kwargs) + self.output = kwargs.get('output', None) + + class Transformation(SubResource): """A transformation object, containing all information associated with the named transformation. All transformations are contained under a streaming job. diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models_py3.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models_py3.py index aa07f713986e..b77f8c641199 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models_py3.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models_py3.py @@ -7,7 +7,7 @@ # -------------------------------------------------------------------------- import datetime -from typing import Dict, List, Optional, Union +from typing import Any, Dict, List, Optional, Union from azure.core.exceptions import HttpResponseError import msrest.serialization @@ -28,8 +28,8 @@ class FunctionProperties(msrest.serialization.Model): :param type: Required. Indicates the type of function.Constant filled by server. :type type: str :ivar etag: The current entity tag for the function. 
This is an opaque string. You can use it - to detect whether the resource has changed between requests. You can also use it in the If- - Match or If-None-Match headers for write operations for optimistic concurrency. + to detect whether the resource has changed between requests. You can also use it in the + If-Match or If-None-Match headers for write operations for optimistic concurrency. :vartype etag: str :param inputs: :type inputs: list[~stream_analytics_management_client.models.FunctionInput] @@ -83,8 +83,8 @@ class AggregateFunctionProperties(FunctionProperties): :param type: Required. Indicates the type of function.Constant filled by server. :type type: str :ivar etag: The current entity tag for the function. This is an opaque string. You can use it - to detect whether the resource has changed between requests. You can also use it in the If- - Match or If-None-Match headers for write operations for optimistic concurrency. + to detect whether the resource has changed between requests. You can also use it in the + If-Match or If-None-Match headers for write operations for optimistic concurrency. :vartype etag: str :param inputs: :type inputs: list[~stream_analytics_management_client.models.FunctionInput] @@ -165,7 +165,7 @@ class AvroSerialization(Serialization): :type type: str or ~stream_analytics_management_client.models.EventSerializationType :param properties: The properties that are associated with the Avro serialization type. Required on PUT (CreateOrReplace) requests. - :type properties: object + :type properties: any """ _validation = { @@ -180,7 +180,7 @@ class AvroSerialization(Serialization): def __init__( self, *, - properties: Optional[object] = None, + properties: Optional[Any] = None, **kwargs ): super(AvroSerialization, self).__init__(**kwargs) @@ -192,7 +192,7 @@ class OutputDataSource(msrest.serialization.Model): """Describes the data source that output will be written to. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureFunctionOutputDataSource, AzureDataLakeStoreOutputDataSource, EventHubV2OutputDataSource, EventHubOutputDataSource, ServiceBusQueueOutputDataSource, ServiceBusTopicOutputDataSource, AzureSynapseOutputDataSource, AzureSqlDatabaseOutputDataSource, BlobOutputDataSource, DocumentDbOutputDataSource, AzureTableOutputDataSource, PowerBIOutputDataSource. + sub-classes are: AzureFunctionOutputDataSource, AzureDataLakeStoreOutputDataSource, EventHubV2OutputDataSource, EventHubOutputDataSource, ServiceBusQueueOutputDataSource, ServiceBusTopicOutputDataSource, AzureSynapseOutputDataSource, AzureSqlDatabaseOutputDataSource, BlobOutputDataSource, DocumentDbOutputDataSource, AzureTableOutputDataSource, PowerBIOutputDataSource, RawOutputDatasource. All required parameters must be populated in order to send to Azure. 
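
The object to any annotation change above corresponds, in these py3 models, to constructor signatures taking Optional[Any]. A minimal sketch of what that permits for callers, assuming the regenerated package:

    from azure.mgmt.streamanalytics.models import AvroSerialization

    # 'properties' is typed Any: any JSON-shaped value is accepted here; an empty
    # dict is a minimal payload for PUT (CreateOrReplace) requests.
    avro = AvroSerialization(properties={})
    print(avro.type)  # 'Avro' -- discriminator constant filled by the constructor
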
@@ -210,7 +210,7 @@ class OutputDataSource(msrest.serialization.Model): } _subtype_map = { - 'type': {'Microsoft.AzureFunction': 'AzureFunctionOutputDataSource', 'Microsoft.DataLake/Accounts': 'AzureDataLakeStoreOutputDataSource', 'Microsoft.EventHub/EventHub': 'EventHubV2OutputDataSource', 'Microsoft.ServiceBus/EventHub': 'EventHubOutputDataSource', 'Microsoft.ServiceBus/Queue': 'ServiceBusQueueOutputDataSource', 'Microsoft.ServiceBus/Topic': 'ServiceBusTopicOutputDataSource', 'Microsoft.Sql/Server/DataWarehouse': 'AzureSynapseOutputDataSource', 'Microsoft.Sql/Server/Database': 'AzureSqlDatabaseOutputDataSource', 'Microsoft.Storage/Blob': 'BlobOutputDataSource', 'Microsoft.Storage/DocumentDB': 'DocumentDbOutputDataSource', 'Microsoft.Storage/Table': 'AzureTableOutputDataSource', 'PowerBI': 'PowerBIOutputDataSource'} + 'type': {'Microsoft.AzureFunction': 'AzureFunctionOutputDataSource', 'Microsoft.DataLake/Accounts': 'AzureDataLakeStoreOutputDataSource', 'Microsoft.EventHub/EventHub': 'EventHubV2OutputDataSource', 'Microsoft.ServiceBus/EventHub': 'EventHubOutputDataSource', 'Microsoft.ServiceBus/Queue': 'ServiceBusQueueOutputDataSource', 'Microsoft.ServiceBus/Topic': 'ServiceBusTopicOutputDataSource', 'Microsoft.Sql/Server/DataWarehouse': 'AzureSynapseOutputDataSource', 'Microsoft.Sql/Server/Database': 'AzureSqlDatabaseOutputDataSource', 'Microsoft.Storage/Blob': 'BlobOutputDataSource', 'Microsoft.Storage/DocumentDB': 'DocumentDbOutputDataSource', 'Microsoft.Storage/Table': 'AzureTableOutputDataSource', 'PowerBI': 'PowerBIOutputDataSource', 'Raw': 'RawOutputDatasource'} } def __init__( @@ -606,8 +606,6 @@ def __init__( class AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): """The parameters needed to retrieve the default function definition for an Azure Machine Learning web service function. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param binding_type: Required. Indicates the function binding type.Constant filled by server. @@ -615,13 +613,13 @@ class AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters(Fun :param execute_endpoint: The Request-Response execute endpoint of the Azure Machine Learning web service. :type execute_endpoint: str - :ivar udf_type: The function type. Default value: "Scalar". - :vartype udf_type: str + :param udf_type: The function type. The only acceptable values to pass in are None and + "Scalar". The default value is None. + :type udf_type: str """ _validation = { 'binding_type': {'required': True}, - 'udf_type': {'constant': True}, } _attribute_map = { @@ -630,17 +628,17 @@ class AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters(Fun 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'str'}, } - udf_type = "Scalar" - def __init__( self, *, execute_endpoint: Optional[str] = None, + udf_type: Optional[str] = None, **kwargs ): super(AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) self.binding_type = 'Microsoft.MachineLearningServices' # type: str self.execute_endpoint = execute_endpoint + self.udf_type = udf_type class AzureMachineLearningServiceInputColumn(msrest.serialization.Model): @@ -741,8 +739,8 @@ class AzureMachineLearningStudioFunctionBinding(FunctionBinding): :param type: Required. Indicates the function binding type.Constant filled by server. 
:type type: str :param endpoint: The Request-Response execute endpoint of the Azure Machine Learning Studio. - Find out more here: https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning- - consume-web-services#request-response-service-rrs. + Find out more here: + https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs. :type endpoint: str :param api_key: The API key used to authenticate with Request-Response endpoint. :type api_key: str @@ -791,23 +789,21 @@ def __init__( class AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): """The parameters needed to retrieve the default function definition for an Azure Machine Learning Studio function. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param binding_type: Required. Indicates the function binding type.Constant filled by server. :type binding_type: str :param execute_endpoint: The Request-Response execute endpoint of the Azure Machine Learning - Studio. Find out more here: https://docs.microsoft.com/en-us/azure/machine-learning/machine- - learning-consume-web-services#request-response-service-rrs. + Studio. Find out more here: + https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs. :type execute_endpoint: str - :ivar udf_type: The function type. Default value: "Scalar". - :vartype udf_type: str + :param udf_type: The function type. The only acceptable values to pass in are None and + "Scalar". The default value is None. + :type udf_type: str """ _validation = { 'binding_type': {'required': True}, - 'udf_type': {'constant': True}, } _attribute_map = { @@ -816,17 +812,17 @@ class AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters(Func 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'str'}, } - udf_type = "Scalar" - def __init__( self, *, execute_endpoint: Optional[str] = None, + udf_type: Optional[str] = None, **kwargs ): super(AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) self.binding_type = 'Microsoft.MachineLearning/WebService' # type: str self.execute_endpoint = execute_endpoint + self.udf_type = udf_type class AzureMachineLearningStudioInputColumn(msrest.serialization.Model): @@ -835,8 +831,8 @@ class AzureMachineLearningStudioInputColumn(msrest.serialization.Model): :param name: The name of the input column. :type name: str :param data_type: The (Azure Machine Learning supported) data type of the input column. A list - of valid Azure Machine Learning data types are described at https://msdn.microsoft.com/en- - us/library/azure/dn905923.aspx . + of valid Azure Machine Learning data types are described at + https://msdn.microsoft.com/en-us/library/azure/dn905923.aspx . :type data_type: str :param map_to: The zero based index of the function parameter this input maps to. :type map_to: int @@ -895,8 +891,8 @@ class AzureMachineLearningStudioOutputColumn(msrest.serialization.Model): :param name: The name of the output column. :type name: str :param data_type: The (Azure Machine Learning supported) data type of the output column. A list - of valid Azure Machine Learning data types are described at https://msdn.microsoft.com/en- - us/library/azure/dn905923.aspx . 
+ of valid Azure Machine Learning data types are described at + https://msdn.microsoft.com/en-us/library/azure/dn905923.aspx . :type data_type: str """ @@ -1116,7 +1112,7 @@ class ReferenceInputDataSource(msrest.serialization.Model): """Describes an input data source that contains reference data. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureSqlReferenceInputDataSource, BlobReferenceInputDataSource. + sub-classes are: AzureSqlReferenceInputDataSource, BlobReferenceInputDataSource, RawReferenceInputDataSource. All required parameters must be populated in order to send to Azure. @@ -1134,7 +1130,7 @@ class ReferenceInputDataSource(msrest.serialization.Model): } _subtype_map = { - 'type': {'Microsoft.Sql/Server/Database': 'AzureSqlReferenceInputDataSource', 'Microsoft.Storage/Blob': 'BlobReferenceInputDataSource'} + 'type': {'Microsoft.Sql/Server/Database': 'AzureSqlReferenceInputDataSource', 'Microsoft.Storage/Blob': 'BlobReferenceInputDataSource', 'Raw': 'RawReferenceInputDataSource'} } def __init__( @@ -1475,10 +1471,10 @@ class BlobDataSourceProperties(msrest.serialization.Model): :type container: str :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. :type path_pattern: str :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. @@ -1531,10 +1527,10 @@ class BlobOutputDataSource(OutputDataSource): :type container: str :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. :type path_pattern: str :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. @@ -1594,10 +1590,10 @@ class BlobOutputDataSourceProperties(BlobDataSourceProperties): :type container: str :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. 
+ input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. :type path_pattern: str :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. @@ -1651,10 +1647,10 @@ class BlobReferenceInputDataSource(ReferenceInputDataSource): :type container: str :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. :type path_pattern: str :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. @@ -1708,10 +1704,10 @@ class BlobReferenceInputDataSourceProperties(BlobDataSourceProperties): :type container: str :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. :type path_pattern: str :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. @@ -1746,7 +1742,7 @@ class StreamInputDataSource(msrest.serialization.Model): """Describes an input data source that contains stream data. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: IoTHubStreamInputDataSource, EventHubV2StreamInputDataSource, EventHubStreamInputDataSource, BlobStreamInputDataSource. + sub-classes are: IoTHubStreamInputDataSource, EventHubV2StreamInputDataSource, EventHubStreamInputDataSource, BlobStreamInputDataSource, RawStreamInputDataSource. All required parameters must be populated in order to send to Azure. 
@@ -1764,7 +1760,7 @@ class StreamInputDataSource(msrest.serialization.Model): } _subtype_map = { - 'type': {'Microsoft.Devices/IotHubs': 'IoTHubStreamInputDataSource', 'Microsoft.EventHub/EventHub': 'EventHubV2StreamInputDataSource', 'Microsoft.ServiceBus/EventHub': 'EventHubStreamInputDataSource', 'Microsoft.Storage/Blob': 'BlobStreamInputDataSource'} + 'type': {'Microsoft.Devices/IotHubs': 'IoTHubStreamInputDataSource', 'Microsoft.EventHub/EventHub': 'EventHubV2StreamInputDataSource', 'Microsoft.ServiceBus/EventHub': 'EventHubStreamInputDataSource', 'Microsoft.Storage/Blob': 'BlobStreamInputDataSource', 'Raw': 'RawStreamInputDataSource'} } def __init__( @@ -1792,10 +1788,10 @@ class BlobStreamInputDataSource(StreamInputDataSource): :type container: str :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. :type path_pattern: str :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. @@ -1855,10 +1851,10 @@ class BlobStreamInputDataSourceProperties(BlobDataSourceProperties): :type container: str :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. :type path_pattern: str :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. @@ -1896,7 +1892,7 @@ def __init__( class Resource(msrest.serialization.Model): - """Resource. + """The base resource definition. Variables are only populated by the server, and will be ignored when sending a request. @@ -2315,21 +2311,19 @@ def __init__( class CSharpFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): """The parameters needed to retrieve the default function definition for a CSharp function. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param binding_type: Required. Indicates the function binding type.Constant filled by server. :type binding_type: str :param script: The CSharp code containing a single function definition. :type script: str - :ivar udf_type: The function type. Default value: "Scalar". - :vartype udf_type: str + :param udf_type: The function type. 
The only acceptable values to pass in are None and + "Scalar". The default value is None. + :type udf_type: str """ _validation = { 'binding_type': {'required': True}, - 'udf_type': {'constant': True}, } _attribute_map = { @@ -2338,17 +2332,17 @@ class CSharpFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultD 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'str'}, } - udf_type = "Scalar" - def __init__( self, *, script: Optional[str] = None, + udf_type: Optional[str] = None, **kwargs ): super(CSharpFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) self.binding_type = 'Microsoft.StreamAnalytics/CLRUdf' # type: str self.script = script + self.udf_type = udf_type class CsvSerialization(Serialization): @@ -2361,9 +2355,10 @@ class CsvSerialization(Serialization): "Csv", "Avro", "Json", "CustomClr", "Parquet". :type type: str or ~stream_analytics_management_client.models.EventSerializationType :param field_delimiter: Specifies the delimiter that will be used to separate comma-separated - value (CSV) records. See https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream- - analytics-input or https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics- - output for a list of supported values. Required on PUT (CreateOrReplace) requests. + value (CSV) records. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a list of + supported values. Required on PUT (CreateOrReplace) requests. :type field_delimiter: str :param encoding: Specifies the encoding of the incoming data in the case of input and the encoding of outgoing data in the case of output. Required on PUT (CreateOrReplace) requests. @@ -2514,9 +2509,9 @@ class DocumentDbOutputDataSource(OutputDataSource): :type database: str :param collection_name_pattern: The collection name pattern for the collections to be used. The collection name format can be constructed using the optional {partition} token, where - partitions start from 0. See the DocumentDB section of https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for more information. Required on PUT - (CreateOrReplace) requests. + partitions start from 0. See the DocumentDB section of + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for more + information. Required on PUT (CreateOrReplace) requests. :type collection_name_pattern: str :param partition_key: The name of the field in output events used to specify the key for partitioning output across collections. If 'collectionNamePattern' contains the {partition} @@ -2566,88 +2561,88 @@ class Error(msrest.serialization.Model): """Common error representation. :param error: Error definition properties. - :type error: ~stream_analytics_management_client.models.ErrorAutoGenerated + :type error: ~stream_analytics_management_client.models.ErrorError """ _attribute_map = { - 'error': {'key': 'error', 'type': 'ErrorAutoGenerated'}, + 'error': {'key': 'error', 'type': 'ErrorError'}, } def __init__( self, *, - error: Optional["ErrorAutoGenerated"] = None, + error: Optional["ErrorError"] = None, **kwargs ): super(Error, self).__init__(**kwargs) self.error = error -class ErrorAutoGenerated(msrest.serialization.Model): - """Error definition properties. +class ErrorDetails(msrest.serialization.Model): + """Common error details representation. :param code: Error code. 
:type code: str - :param message: Error message. - :type message: str :param target: Error target. :type target: str - :param details: Error details. - :type details: list[~stream_analytics_management_client.models.ErrorDetails] + :param message: Error message. + :type message: str """ _attribute_map = { 'code': {'key': 'code', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, 'target': {'key': 'target', 'type': 'str'}, - 'details': {'key': 'details', 'type': '[ErrorDetails]'}, + 'message': {'key': 'message', 'type': 'str'}, } def __init__( self, *, code: Optional[str] = None, - message: Optional[str] = None, target: Optional[str] = None, - details: Optional[List["ErrorDetails"]] = None, + message: Optional[str] = None, **kwargs ): - super(ErrorAutoGenerated, self).__init__(**kwargs) + super(ErrorDetails, self).__init__(**kwargs) self.code = code - self.message = message self.target = target - self.details = details + self.message = message -class ErrorDetails(msrest.serialization.Model): - """Common error details representation. +class ErrorError(msrest.serialization.Model): + """Error definition properties. :param code: Error code. :type code: str - :param target: Error target. - :type target: str :param message: Error message. :type message: str + :param target: Error target. + :type target: str + :param details: Error details. + :type details: list[~stream_analytics_management_client.models.ErrorDetails] """ _attribute_map = { 'code': {'key': 'code', 'type': 'str'}, - 'target': {'key': 'target', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[ErrorDetails]'}, } def __init__( self, *, code: Optional[str] = None, - target: Optional[str] = None, message: Optional[str] = None, + target: Optional[str] = None, + details: Optional[List["ErrorDetails"]] = None, **kwargs ): - super(ErrorDetails, self).__init__(**kwargs) + super(ErrorError, self).__init__(**kwargs) self.code = code - self.target = target self.message = message + self.target = target + self.details = details class ErrorResponse(msrest.serialization.Model): @@ -3290,8 +3285,8 @@ class FunctionOutput(msrest.serialization.Model): """Describes the output of a function. :param data_type: The (Azure Stream Analytics supported) data type of the function output. A - list of valid Azure Stream Analytics data types are described at https://msdn.microsoft.com/en- - us/library/azure/dn835065.aspx. + list of valid Azure Stream Analytics data types are described at + https://msdn.microsoft.com/en-us/library/azure/dn835065.aspx. :type data_type: str """ @@ -3568,8 +3563,6 @@ def __init__( class JavaScriptFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): """The parameters needed to retrieve the default function definition for a JavaScript function. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param binding_type: Required. Indicates the function binding type.Constant filled by server. @@ -3577,13 +3570,13 @@ class JavaScriptFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefa :param script: The JavaScript code containing a single function definition. For example: 'function (x, y) { return x + y; }'. :type script: str - :ivar udf_type: The function type. Default value: "Scalar". - :vartype udf_type: str + :param udf_type: The function type. 
The only acceptable values to pass in are None and + "Scalar". The default value is None. + :type udf_type: str """ _validation = { 'binding_type': {'required': True}, - 'udf_type': {'constant': True}, } _attribute_map = { @@ -3592,17 +3585,17 @@ class JavaScriptFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefa 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'str'}, } - udf_type = "Scalar" - def __init__( self, *, script: Optional[str] = None, + udf_type: Optional[str] = None, **kwargs ): super(JavaScriptFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) self.binding_type = 'Microsoft.StreamAnalytics/JavascriptUdf' # type: str self.script = script + self.udf_type = udf_type class StorageAccount(msrest.serialization.Model): @@ -3922,7 +3915,7 @@ class ParquetSerialization(Serialization): :type type: str or ~stream_analytics_management_client.models.EventSerializationType :param properties: The properties that are associated with the Parquet serialization type. Required on PUT (CreateOrReplace) requests. - :type properties: object + :type properties: any """ _validation = { @@ -3937,7 +3930,7 @@ class ParquetSerialization(Serialization): def __init__( self, *, - properties: Optional[object] = None, + properties: Optional[Any] = None, **kwargs ): super(ParquetSerialization, self).__init__(**kwargs) @@ -4089,7 +4082,41 @@ def __init__( self.authentication_mode = authentication_mode -class PrivateEndpoint(Resource): +class ProxyResource(Resource): + """The resource model definition for a ARM proxy resource. It will have everything other than required location and tags. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ProxyResource, self).__init__(**kwargs) + + +class PrivateEndpoint(ProxyResource): """Complete information about the private endpoint. Variables are only populated by the server, and will be ignored when sending a request. @@ -4277,38 +4304,252 @@ def __init__( self.private_link_service_connection_state = private_link_service_connection_state -class ProxyResource(Resource): - """The resource model definition for a ARM proxy resource. It will have everything other than required location and tags. +class QueryCompilationErrors(msrest.serialization.Model): + """The errors produced by the compiler. Variables are only populated by the server, and will be ignored when sending a request. - :ivar id: Fully qualified resource Id for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. 
Ex- Microsoft.Compute/virtualMachines or - Microsoft.Storage/storageAccounts. - :vartype type: str + :ivar message: The content of the error message. + :vartype message: str + :ivar start_line: Describes the error location in the original query. Not set if isGlobal is + true. + :vartype start_line: int + :ivar start_column: Describes the error location in the original query. Not set if isGlobal is + true. + :vartype start_column: int + :ivar end_line: Describes the error location in the original query. Not set if isGlobal is + true. + :vartype end_line: int + :ivar end_column: Describes the error location in the original query. Not set if isGlobal is + true. + :vartype end_column: int + :ivar is_global: Whether the error is not for a specific part but for the entire query. + :vartype is_global: bool """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, + 'message': {'readonly': True}, + 'start_line': {'readonly': True}, + 'start_column': {'readonly': True}, + 'end_line': {'readonly': True}, + 'end_column': {'readonly': True}, + 'is_global': {'readonly': True}, + } + + _attribute_map = { + 'message': {'key': 'message', 'type': 'str'}, + 'start_line': {'key': 'startLine', 'type': 'int'}, + 'start_column': {'key': 'startColumn', 'type': 'int'}, + 'end_line': {'key': 'endLine', 'type': 'int'}, + 'end_column': {'key': 'endColumn', 'type': 'int'}, + 'is_global': {'key': 'isGlobal', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(QueryCompilationErrors, self).__init__(**kwargs) + self.message = None + self.start_line = None + self.start_column = None + self.end_line = None + self.end_column = None + self.is_global = None + + +class QueryCompilationResult(msrest.serialization.Model): + """The result of the query compilation request. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param errors: The errors produced by the compiler. + :type errors: ~stream_analytics_management_client.models.QueryCompilationErrors + :ivar warnings: Warning messages produced by the compiler. + :vartype warnings: list[str] + :ivar inputs: All input names used by the query. + :vartype inputs: list[str] + :ivar outputs: All output names used by the query. + :vartype outputs: list[str] + :ivar functions: All function names used by the query. + :vartype functions: list[str] + """ + + _validation = { + 'warnings': {'readonly': True}, + 'inputs': {'readonly': True}, + 'outputs': {'readonly': True}, + 'functions': {'readonly': True}, + } + + _attribute_map = { + 'errors': {'key': 'errors', 'type': 'QueryCompilationErrors'}, + 'warnings': {'key': 'warnings', 'type': '[str]'}, + 'inputs': {'key': 'inputs', 'type': '[str]'}, + 'outputs': {'key': 'outputs', 'type': '[str]'}, + 'functions': {'key': 'functions', 'type': '[str]'}, + } + + def __init__( + self, + *, + errors: Optional["QueryCompilationErrors"] = None, + **kwargs + ): + super(QueryCompilationResult, self).__init__(**kwargs) + self.errors = errors + self.warnings = None + self.inputs = None + self.outputs = None + self.functions = None + + +class QueryTestingResult(Error): + """The result of the query testing request. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param error: Error definition properties. + :type error: ~stream_analytics_management_client.models.ErrorError + :ivar status: The status of the query testing request. 
Possible values include: "Started", + "Success", "CompilerError", "RuntimeError", "Timeout", "UnknownError". + :vartype status: str or ~stream_analytics_management_client.models.QueryTestingResultStatus + :ivar output_uri: The SAS URL to the outputs payload. + :vartype output_uri: str + """ + + _validation = { + 'status': {'readonly': True}, + 'output_uri': {'readonly': True}, + } + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorError'}, + 'status': {'key': 'status', 'type': 'str'}, + 'output_uri': {'key': 'outputUri', 'type': 'str'}, + } + + def __init__( + self, + *, + error: Optional["ErrorError"] = None, + **kwargs + ): + super(QueryTestingResult, self).__init__(error=error, **kwargs) + self.status = None + self.output_uri = None + + +class RawOutputDatasource(OutputDataSource): + """Describes a raw output data source. This data source type is only applicable/usable when using the query testing API. You cannot create a job with this data source type or add an output of this data source type to an existing job. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of data source output will be written to. Required on + PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param payload_uri: The SAS URL to a blob where the output should be written. If this property + is not set, output data will be written into a temporary storage, and a SAS URL to that + temporary storage will be included in the result. + :type payload_uri: str + """ + + _validation = { + 'type': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, + 'payload_uri': {'key': 'properties.payloadUri', 'type': 'str'}, } def __init__( self, + *, + payload_uri: Optional[str] = None, **kwargs ): - super(ProxyResource, self).__init__(**kwargs) + super(RawOutputDatasource, self).__init__(**kwargs) + self.type = 'Raw' # type: str + self.payload_uri = payload_uri + + +class RawReferenceInputDataSource(ReferenceInputDataSource): + """Describes a raw input data source that contains reference data. This data source type is only applicable/usable when using the query testing API. You cannot create a job with this data source type or add an input of this data source type to an existing job. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of input data source containing reference data. + Required on PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param payload: The JSON serialized content of the input data. Either payload or payloadUri + must be set, but not both. + :type payload: str + :param payload_uri: The SAS URL to a blob containing the JSON serialized content of the input + data. Either payload or payloadUri must be set, but not both. 
+ :type payload_uri: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'payload': {'key': 'properties.payload', 'type': 'str'}, + 'payload_uri': {'key': 'properties.payloadUri', 'type': 'str'}, + } + + def __init__( + self, + *, + payload: Optional[str] = None, + payload_uri: Optional[str] = None, + **kwargs + ): + super(RawReferenceInputDataSource, self).__init__(**kwargs) + self.type = 'Raw' # type: str + self.payload = payload + self.payload_uri = payload_uri + + +class RawStreamInputDataSource(StreamInputDataSource): + """Describes a raw input data source that contains stream data. This data source type is only applicable/usable when using the query testing API. You cannot create a job with this data source type or add an input of this data source type to an existing job. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of input data source containing stream data. Required + on PUT (CreateOrReplace) requests.Constant filled by server. + :type type: str + :param payload: The JSON serialized content of the input data. Either payload or payloadUri + must be set, but not both. + :type payload: str + :param payload_uri: The SAS URL to a blob containing the JSON serialized content of the input + data. Either payload or payloadUri must be set, but not both. + :type payload_uri: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'payload': {'key': 'properties.payload', 'type': 'str'}, + 'payload_uri': {'key': 'properties.payloadUri', 'type': 'str'}, + } + + def __init__( + self, + *, + payload: Optional[str] = None, + payload_uri: Optional[str] = None, + **kwargs + ): + super(RawStreamInputDataSource, self).__init__(**kwargs) + self.type = 'Raw' # type: str + self.payload = payload + self.payload_uri = payload_uri class ReferenceInputProperties(InputProperties): @@ -4401,6 +4642,149 @@ def __init__( self.error = None +class SampleInput(msrest.serialization.Model): + """The stream analytics input to sample. + + :param input: The stream analytics input to sample. + :type input: ~stream_analytics_management_client.models.Input + :param event_start_time: The start time for the data to sample. This is in DateTime format. + :type event_start_time: str + :param event_end_time: The end time for the data to sample. This is in DateTime format. + :type event_end_time: str + :param serialization: The serialization format for the sampled output data saved to the storage + (not for the input format). + :type serialization: ~stream_analytics_management_client.models.SampleInputSerialization + :param number_of_events: The max number of events to read. Defaults to 1000. + :type number_of_events: int + :param max_time_to_read_in_seconds: The max time, in seconds, the service should wait to read + events. Defaults to 30. If this time is reached, the service will stop reading and return + whatever data has been read. However, this does not apply when no data is read, in which case + the service waits for a maximum of 90 seconds. + :type max_time_to_read_in_seconds: int + :param partition_count: When the caller already knows the input partition count, passing in + this value accelerates the operation because the server does not have to fetch the partition + count. + :type partition_count: int + :param compatibility_level: Defaults to the default ASA job compatibility level, which is + currently 1.2.
+ :type compatibility_level: str + :param events_url: The SAS URI of the storage blob for the service to write the sampled events + to. If this parameter is not provided, the service will write events to the system account and + share a temporary SAS URI to it. + :type events_url: str + :param data_locale: Defaults to en-US. + :type data_locale: str + """ + + _attribute_map = { + 'input': {'key': 'input', 'type': 'Input'}, + 'event_start_time': {'key': 'eventStartTime', 'type': 'str'}, + 'event_end_time': {'key': 'eventEndTime', 'type': 'str'}, + 'serialization': {'key': 'serialization', 'type': 'SampleInputSerialization'}, + 'number_of_events': {'key': 'numberOfEvents', 'type': 'int'}, + 'max_time_to_read_in_seconds': {'key': 'maxTimeToReadInSeconds', 'type': 'int'}, + 'partition_count': {'key': 'partitionCount', 'type': 'int'}, + 'compatibility_level': {'key': 'compatibilityLevel', 'type': 'str'}, + 'events_url': {'key': 'eventsUrl', 'type': 'str'}, + 'data_locale': {'key': 'dataLocale', 'type': 'str'}, + } + + def __init__( + self, + *, + input: Optional["Input"] = None, + event_start_time: Optional[str] = None, + event_end_time: Optional[str] = None, + serialization: Optional["SampleInputSerialization"] = None, + number_of_events: Optional[int] = None, + max_time_to_read_in_seconds: Optional[int] = None, + partition_count: Optional[int] = None, + compatibility_level: Optional[str] = None, + events_url: Optional[str] = None, + data_locale: Optional[str] = None, + **kwargs + ): + super(SampleInput, self).__init__(**kwargs) + self.input = input + self.event_start_time = event_start_time + self.event_end_time = event_end_time + self.serialization = serialization + self.number_of_events = number_of_events + self.max_time_to_read_in_seconds = max_time_to_read_in_seconds + self.partition_count = partition_count + self.compatibility_level = compatibility_level + self.events_url = events_url + self.data_locale = data_locale + + +class SampleInputResult(Error): + """The result of the sample input request. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param error: Error definition properties. + :type error: ~stream_analytics_management_client.models.ErrorError + :ivar status: The status of the sample input request. Possible values include: + "ReadAllEventsInRange", "NoEventsFoundInRange", "ErrorConnectingToInput". + :vartype status: str or ~stream_analytics_management_client.models.SampleInputResultStatus + :ivar diagnostics: Diagnostic messages, e.g. a message indicating that some partitions from + the input have no data. + :vartype diagnostics: list[str] + :ivar events_download_url: A SAS URL to download the sampled input data. + :vartype events_download_url: list[str] + :ivar last_arrival_time: The timestamp for the last event in the data. It is in DateTime + format.
+ :vartype last_arrival_time: str + """ + + _validation = { + 'status': {'readonly': True}, + 'diagnostics': {'readonly': True}, + 'events_download_url': {'readonly': True}, + 'last_arrival_time': {'readonly': True}, + } + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorError'}, + 'status': {'key': 'status', 'type': 'str'}, + 'diagnostics': {'key': 'diagnostics', 'type': '[str]'}, + 'events_download_url': {'key': 'eventsDownloadUrl', 'type': '[str]'}, + 'last_arrival_time': {'key': 'lastArrivalTime', 'type': 'str'}, + } + + def __init__( + self, + *, + error: Optional["ErrorError"] = None, + **kwargs + ): + super(SampleInputResult, self).__init__(error=error, **kwargs) + self.status = None + self.diagnostics = None + self.events_download_url = None + self.last_arrival_time = None + + +class SampleInputSerialization(msrest.serialization.Model): + """The serialization format for the sampled output data saved to the storage (not for the input format). + + :param type: The serialization format. + :type type: str + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + *, + type: Optional[str] = None, + **kwargs + ): + super(SampleInputSerialization, self).__init__(**kwargs) + self.type = type + + class ScalarFunctionProperties(FunctionProperties): """The properties that are associated with a scalar function. @@ -4411,8 +4795,8 @@ class ScalarFunctionProperties(FunctionProperties): :param type: Required. Indicates the type of function.Constant filled by server. :type type: str :ivar etag: The current entity tag for the function. This is an opaque string. You can use it - to detect whether the resource has changed between requests. You can also use it in the If- - Match or If-None-Match headers for write operations for optimistic concurrency. + to detect whether the resource has changed between requests. You can also use it in the + If-Match or If-None-Match headers for write operations for optimistic concurrency. :vartype etag: str :param inputs: :type inputs: list[~stream_analytics_management_client.models.FunctionInput] @@ -4789,9 +5173,9 @@ class StreamingJob(TrackedResource): have a value of -1. :type events_late_arrival_max_delay_in_seconds: int :param data_locale: The data locale of the stream analytics job. Value should be the name of a - supported .NET Culture from the set https://msdn.microsoft.com/en- - us/library/system.globalization.culturetypes(v=vs.110).aspx. Defaults to 'en-US' if none - specified. + supported .NET Culture from the set + https://msdn.microsoft.com/en-us/library/system.globalization.culturetypes(v=vs.110).aspx. + Defaults to 'en-US' if none specified. :type data_locale: str :param compatibility_level: Controls certain runtime behaviors of the streaming job. Possible values include: "1.0". @@ -4818,16 +5202,16 @@ class StreamingJob(TrackedResource): transformation. :type functions: list[~stream_analytics_management_client.models.Function] :ivar etag: The current entity tag for the streaming job. This is an opaque string. You can use - it to detect whether the resource has changed between requests. You can also use it in the If- - Match or If-None-Match headers for write operations for optimistic concurrency. + it to detect whether the resource has changed between requests. You can also use it in the + If-Match or If-None-Match headers for write operations for optimistic concurrency. :vartype etag: str :param job_storage_account: The properties that are associated with an Azure Storage account with MSI. 
:type job_storage_account: ~stream_analytics_management_client.models.JobStorageAccount - :ivar content_storage_policy: Valid values are JobStorageAccount and SystemAccount. If set to + :param content_storage_policy: Valid values are JobStorageAccount and SystemAccount. If set to JobStorageAccount, this requires the user to also specify jobStorageAccount property. . Possible values include: "SystemAccount", "JobStorageAccount". - :vartype content_storage_policy: str or + :type content_storage_policy: str or ~stream_analytics_management_client.models.ContentStoragePolicy :param externals: The storage account where the custom code artifacts are located. :type externals: ~stream_analytics_management_client.models.External @@ -4845,7 +5229,6 @@ class StreamingJob(TrackedResource): 'last_output_event_time': {'readonly': True}, 'created_date': {'readonly': True}, 'etag': {'readonly': True}, - 'content_storage_policy': {'readonly': True}, } _attribute_map = { @@ -4902,6 +5285,7 @@ def __init__( outputs: Optional[List["Output"]] = None, functions: Optional[List["Function"]] = None, job_storage_account: Optional["JobStorageAccount"] = None, + content_storage_policy: Optional[Union[str, "ContentStoragePolicy"]] = None, externals: Optional["External"] = None, cluster: Optional["ClusterInfo"] = None, **kwargs @@ -4929,7 +5313,7 @@ def __init__( self.functions = functions self.etag = None self.job_storage_account = job_storage_account - self.content_storage_policy = None + self.content_storage_policy = content_storage_policy self.externals = externals self.cluster = cluster @@ -5114,6 +5498,79 @@ def __init__( self.value = None +class TestDatasourceResult(Error): + """The result of the test input or output request. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param error: Error definition properties. + :type error: ~stream_analytics_management_client.models.ErrorError + :ivar status: The status of the sample output request. Possible values include: + "TestSucceeded", "TestFailed". + :vartype status: str or ~stream_analytics_management_client.models.TestDatasourceResultStatus + """ + + _validation = { + 'status': {'readonly': True}, + } + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorError'}, + 'status': {'key': 'status', 'type': 'str'}, + } + + def __init__( + self, + *, + error: Optional["ErrorError"] = None, + **kwargs + ): + super(TestDatasourceResult, self).__init__(error=error, **kwargs) + self.status = None + + +class TestInput(msrest.serialization.Model): + """A stream analytics input object. + + :param input: The stream analytics input to test. + :type input: ~stream_analytics_management_client.models.Input + """ + + _attribute_map = { + 'input': {'key': 'input', 'type': 'Input'}, + } + + def __init__( + self, + *, + input: Optional["Input"] = None, + **kwargs + ): + super(TestInput, self).__init__(**kwargs) + self.input = input + + +class TestOutput(msrest.serialization.Model): + """A stream analytics output object. + + :param output: The stream analytics output to test. + :type output: ~stream_analytics_management_client.models.Output + """ + + _attribute_map = { + 'output': {'key': 'output', 'type': 'Output'}, + } + + def __init__( + self, + *, + output: Optional["Output"] = None, + **kwargs + ): + super(TestOutput, self).__init__(**kwargs) + self.output = output + + class Transformation(SubResource): """A transformation object, containing all information associated with the named transformation. 
All transformations are contained under a streaming job. diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_stream_analytics_management_client_enums.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_stream_analytics_management_client_enums.py index 4ddfdea9c290..20b9a712c0f3 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_stream_analytics_management_client_enums.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_stream_analytics_management_client_enums.py @@ -39,16 +39,21 @@ class ClusterProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enu Canceled """ - SUCCEEDED = "Succeeded" #: The cluster provisioning succeeded. - FAILED = "Failed" #: The cluster provisioning failed. - CANCELED = "Canceled" #: The cluster provisioning was canceled. - IN_PROGRESS = "InProgress" #: The cluster provisioning was inprogress. + #: The cluster provisioning succeeded. + SUCCEEDED = "Succeeded" + #: The cluster provisioning failed. + FAILED = "Failed" + #: The cluster provisioning was canceled. + CANCELED = "Canceled" + #: The cluster provisioning was in progress. + IN_PROGRESS = "InProgress" class ClusterSkuName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Specifies the SKU name of the cluster. Required on PUT (CreateOrUpdate) requests. """ - DEFAULT = "Default" #: The default SKU. + #: The default SKU. + DEFAULT = "Default" class CompatibilityLevel(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Controls certain runtime behaviors of the streaming job. @@ -93,16 +98,26 @@ class JobState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The current execution state of the streaming job. """ - CREATED = "Created" #: The job is currently in the Created state. - STARTING = "Starting" #: The job is currently in the Starting state. - RUNNING = "Running" #: The job is currently in the Running state. - STOPPING = "Stopping" #: The job is currently in the Stopping state. - STOPPED = "Stopped" #: The job is currently in the Stopped state. - DELETING = "Deleting" #: The job is currently in the Deleting state. - FAILED = "Failed" #: The job is currently in the Failed state. - DEGRADED = "Degraded" #: The job is currently in the Degraded state. - RESTARTING = "Restarting" #: The job is currently in the Restarting state. - SCALING = "Scaling" #: The job is currently in the Scaling state. + #: The job is currently in the Created state. + CREATED = "Created" + #: The job is currently in the Starting state. + STARTING = "Starting" + #: The job is currently in the Running state. + RUNNING = "Running" + #: The job is currently in the Stopping state. + STOPPING = "Stopping" + #: The job is currently in the Stopped state. + STOPPED = "Stopped" + #: The job is currently in the Deleting state. + DELETING = "Deleting" + #: The job is currently in the Failed state. + FAILED = "Failed" + #: The job is currently in the Degraded state. + DEGRADED = "Degraded" + #: The job is currently in the Restarting state. + RESTARTING = "Restarting" + #: The job is currently in the Scaling state. + SCALING = "Scaling" class JobType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Describes the type of the job. Valid modes are ``Cloud`` and 'Edge'.
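In the StreamingJob hunks above, content_storage_policy changes from a server-populated read-only field (:ivar) to a writable constructor parameter (:param). A hedged sketch of what this enables; the storage account values are placeholders, and the JOB_STORAGE_ACCOUNT member name is inferred from the "JobStorageAccount" value quoted in the docstring rather than taken from this diff:

    from azure.mgmt.streamanalytics import models

    job = models.StreamingJob(
        location="West US",
        content_storage_policy=models.ContentStoragePolicy.JOB_STORAGE_ACCOUNT,
        # A job storage account is required when the policy is JobStorageAccount.
        job_storage_account=models.JobStorageAccount(
            account_name="examplestorage",        # hypothetical account name
            account_key="<storage-account-key>",  # placeholder secret
        ),
    )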
@@ -141,8 +156,45 @@ class OutputStartMode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): CUSTOM_TIME = "CustomTime" LAST_OUTPUT_EVENT_TIME = "LastOutputEventTime" +class QueryTestingResultStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The status of the query testing request. + """ + + #: The query testing operation was initiated. + STARTED = "Started" + #: The query testing operation succeeded. + SUCCESS = "Success" + #: The query testing operation failed due to a compiler error. + COMPILER_ERROR = "CompilerError" + #: The query testing operation failed due to a runtime error. + RUNTIME_ERROR = "RuntimeError" + #: The query testing operation failed due to a timeout. + TIMEOUT = "Timeout" + #: The query testing operation failed due to an unknown error. + UNKNOWN_ERROR = "UnknownError" + +class SampleInputResultStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The status of the sample input request. + """ + + #: The sample input operation successfully read all the events in the range. + READ_ALL_EVENTS_IN_RANGE = "ReadAllEventsInRange" + #: The sample input operation found no events in the range. + NO_EVENTS_FOUND_IN_RANGE = "NoEventsFoundInRange" + #: The sample input operation failed to connect to the input. + ERROR_CONNECTING_TO_INPUT = "ErrorConnectingToInput" + class StreamingJobSkuName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The name of the SKU. Required on PUT (CreateOrReplace) requests. """ STANDARD = "Standard" + +class TestDatasourceResultStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The status of the test input or output request. + """ + + #: The test datasource operation succeeded. + TEST_SUCCEEDED = "TestSucceeded" + #: The test datasource operation failed. + TEST_FAILED = "TestFailed" diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_clusters_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_clusters_operations.py index 2b0d23f60677..1260caff38e3 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_clusters_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_clusters_operations.py @@ -16,7 +16,7 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling -from .. import models +from .. import models as _models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports @@ -39,7 +39,7 @@ class ClustersOperations(object): :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer): self._client = client @@ -51,13 +51,13 @@ def _create_or_update_initial( self, resource_group_name, # type: str cluster_name, # type: str - cluster, # type: "models.Cluster" + cluster, # type: "_models.Cluster" if_match=None, # type: Optional[str] if_none_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...)
-> "_models.Cluster" + cls = kwargs.pop('cls', None) # type: ClsType["_models.Cluster"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -97,7 +97,7 @@ def _create_or_update_initial( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -116,12 +116,12 @@ def begin_create_or_update( self, resource_group_name, # type: str cluster_name, # type: str - cluster, # type: "models.Cluster" + cluster, # type: "_models.Cluster" if_match=None, # type: Optional[str] if_none_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> LROPoller["models.Cluster"] + # type: (...) -> LROPoller["_models.Cluster"] """Creates a Stream Analytics Cluster or replaces an already existing cluster. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -140,8 +140,8 @@ def begin_create_or_update( :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either Cluster or the result of cls(response) @@ -149,7 +149,7 @@ def begin_create_or_update( :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Cluster"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -176,7 +176,13 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -194,12 +200,12 @@ def _update_initial( self, resource_group_name, # type: str cluster_name, # type: str - cluster, # type: "models.Cluster" + cluster, # type: "_models.Cluster" if_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) 
-> Optional["models.Cluster"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Cluster"]] + # type: (...) -> Optional["_models.Cluster"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.Cluster"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -237,7 +243,7 @@ def _update_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None @@ -254,11 +260,11 @@ def begin_update( self, resource_group_name, # type: str cluster_name, # type: str - cluster, # type: "models.Cluster" + cluster, # type: "_models.Cluster" if_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> LROPoller["models.Cluster"] + # type: (...) -> LROPoller["_models.Cluster"] """Updates an existing cluster. This can be used to partially update (ie. update one or two properties) a cluster without affecting the rest of the cluster definition. @@ -275,8 +281,8 @@ def begin_update( :type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either Cluster or the result of cls(response) @@ -284,7 +290,7 @@ def begin_update( :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Cluster"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -310,7 +316,13 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -330,7 +342,7 @@ def get( cluster_name, # type: str **kwargs # type: Any ): - # type: (...) -> "models.Cluster" + # type: (...) -> "_models.Cluster" """Gets information about the specified cluster. :param resource_group_name: The name of the resource group. The name is case insensitive. 
@@ -342,7 +354,7 @@ def get( :rtype: ~stream_analytics_management_client.models.Cluster :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Cluster"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -373,7 +385,7 @@ def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('Cluster', pipeline_response) @@ -422,7 +434,7 @@ def _delete_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: @@ -445,8 +457,8 @@ def begin_delete( :type cluster_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) @@ -475,7 +487,13 @@ def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -493,7 +511,7 @@ def list_by_subscription( self, **kwargs # type: Any ): - # type: (...) -> Iterable["models.ClusterListResult"] + # type: (...) -> Iterable["_models.ClusterListResult"] """Lists all of the clusters in the given subscription. 
:keyword callable cls: A custom type or function that will be passed the direct response @@ -501,7 +519,7 @@ def list_by_subscription( :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.ClusterListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ClusterListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ClusterListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -546,7 +564,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -562,7 +580,7 @@ def list_by_resource_group( resource_group_name, # type: str **kwargs # type: Any ): - # type: (...) -> Iterable["models.ClusterListResult"] + # type: (...) -> Iterable["_models.ClusterListResult"] """Lists all of the clusters in the given resource group. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -572,7 +590,7 @@ def list_by_resource_group( :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.ClusterListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ClusterListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ClusterListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -618,7 +636,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -635,7 +653,7 @@ def list_streaming_jobs( cluster_name, # type: str **kwargs # type: Any ): - # type: (...) -> Iterable["models.ClusterJobListResult"] + # type: (...) -> Iterable["_models.ClusterJobListResult"] """Lists all of the streaming jobs in the given cluster. :param resource_group_name: The name of the resource group. The name is case insensitive. 
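# --- illustrative usage sketch (editorial) ---
# The list operations above return ItemPaged iterables that fetch follow-up
# pages lazily through the next_link handling in get_next. Assumes the
# authenticated `client` from the earlier sketch.
for cluster in client.clusters.list_by_resource_group("my-rg"):
    print(cluster.name)
for cluster in client.clusters.list_by_subscription():
    print(cluster.id)
# ---------------------------------------------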
@@ -647,7 +665,7 @@ def list_streaming_jobs( :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.ClusterJobListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ClusterJobListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ClusterJobListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -694,7 +712,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_functions_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_functions_operations.py index 31063c85850b..379acb6fa5e3 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_functions_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_functions_operations.py @@ -16,7 +16,7 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling -from .. import models +from .. import models as _models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports @@ -39,7 +39,7 @@ class FunctionsOperations(object): :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer): self._client = client @@ -52,12 +52,12 @@ def create_or_replace( resource_group_name, # type: str job_name, # type: str function_name, # type: str - function, # type: "models.Function" + function, # type: "_models.Function" if_match=None, # type: Optional[str] if_none_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.Function" + # type: (...) -> "_models.Function" """Creates a function or replaces an already existing function under an existing streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -81,7 +81,7 @@ def create_or_replace( :rtype: ~stream_analytics_management_client.models.Function :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Function"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Function"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -144,11 +144,11 @@ def update( resource_group_name, # type: str job_name, # type: str function_name, # type: str - function, # type: "models.Function" + function, # type: "_models.Function" if_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.Function" + # type: (...) -> "_models.Function" """Updates an existing function under an existing streaming job. This can be used to partially update (ie. update one or two properties) a function without affecting the rest the job or function definition. 
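# --- illustrative usage sketch (editorial) ---
# A read-modify-write against the function update operation documented above;
# per its docstring, only properties carried on the patch object are changed.
# Assumes the `client` from the earlier sketch.
from azure.mgmt.streamanalytics.models import Function

udf = client.functions.get("my-rg", "my-job", "my-udf")
patch = Function(properties=udf.properties)  # mutate just the pieces to update
client.functions.update("my-rg", "my-job", "my-udf", patch)
# ---------------------------------------------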
@@ -173,7 +173,7 @@ def update( :rtype: ~stream_analytics_management_client.models.Function :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Function"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Function"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -289,7 +289,7 @@ def get( function_name, # type: str **kwargs # type: Any ): - # type: (...) -> "models.Function" + # type: (...) -> "_models.Function" """Gets details about the specified function. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -303,7 +303,7 @@ def get( :rtype: ~stream_analytics_management_client.models.Function :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Function"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Function"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -354,7 +354,7 @@ def list_by_streaming_job( select=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> Iterable["models.FunctionListResult"] + # type: (...) -> Iterable["_models.FunctionListResult"] """Lists all of the functions under the specified streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -370,7 +370,7 @@ def list_by_streaming_job( :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.FunctionListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.FunctionListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.FunctionListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -434,11 +434,11 @@ def _test_initial( resource_group_name, # type: str job_name, # type: str function_name, # type: str - function=None, # type: Optional["models.Function"] + function=None, # type: Optional["_models.Function"] **kwargs # type: Any ): - # type: (...) -> Optional["models.ResourceTestStatus"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ResourceTestStatus"]] + # type: (...) -> Optional["_models.ResourceTestStatus"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ResourceTestStatus"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -495,10 +495,10 @@ def begin_test( resource_group_name, # type: str job_name, # type: str function_name, # type: str - function=None, # type: Optional["models.Function"] + function=None, # type: Optional["_models.Function"] **kwargs # type: Any ): - # type: (...) -> LROPoller["models.ResourceTestStatus"] + # type: (...) -> LROPoller["_models.ResourceTestStatus"] """Tests if the information provided for a function is valid. This can range from testing the connection to the underlying web service behind the function or making sure the function code provided is syntactically correct. @@ -517,8 +517,8 @@ def begin_test( :type function: ~stream_analytics_management_client.models.Function :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either ResourceTestStatus or the result of cls(response) @@ -526,7 +526,7 @@ def begin_test( :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.ResourceTestStatus"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ResourceTestStatus"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -552,7 +552,14 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'functionName': self._serialize.url("function_name", function_name, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -571,10 +578,10 @@ def retrieve_default_definition( resource_group_name, # type: str job_name, # type: str function_name, # type: str - function_retrieve_default_definition_parameters=None, # type: Optional["models.FunctionRetrieveDefaultDefinitionParameters"] + function_retrieve_default_definition_parameters=None, # type: Optional["_models.FunctionRetrieveDefaultDefinitionParameters"] **kwargs # type: Any ): - # type: (...) -> "models.Function" + # type: (...) -> "_models.Function" """Retrieves the default definition of a function based on the parameters specified. :param resource_group_name: The name of the resource group. The name is case insensitive. 
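# --- illustrative usage sketch (editorial) ---
# The function test LRO above resolves to a ResourceTestStatus; this sketch
# assumes that model exposes a `status` string and an optional `error` with a
# `message`, and uses the `client` from the earlier sketch.
status = client.functions.begin_test("my-rg", "my-job", "my-udf").result()
print(status.status)
if status.error:
    print(status.error.message)
# ---------------------------------------------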
@@ -591,7 +598,7 @@ def retrieve_default_definition( :rtype: ~stream_analytics_management_client.models.Function :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Function"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Function"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_inputs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_inputs_operations.py index 890d33f1b8b1..b7426ff61928 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_inputs_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_inputs_operations.py @@ -16,7 +16,7 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling -from .. import models +from .. import models as _models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports @@ -39,7 +39,7 @@ class InputsOperations(object): :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer): self._client = client @@ -52,12 +52,12 @@ def create_or_replace( resource_group_name, # type: str job_name, # type: str input_name, # type: str - input, # type: "models.Input" + input, # type: "_models.Input" if_match=None, # type: Optional[str] if_none_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.Input" + # type: (...) -> "_models.Input" """Creates an input or replaces an already existing input under an existing streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -80,7 +80,7 @@ def create_or_replace( :rtype: ~stream_analytics_management_client.models.Input :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Input"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Input"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -143,11 +143,11 @@ def update( resource_group_name, # type: str job_name, # type: str input_name, # type: str - input, # type: "models.Input" + input, # type: "_models.Input" if_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.Input" + # type: (...) -> "_models.Input" """Updates an existing input under an existing streaming job. This can be used to partially update (ie. update one or two properties) an input without affecting the rest the job or input definition. @@ -171,7 +171,7 @@ def update( :rtype: ~stream_analytics_management_client.models.Input :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Input"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Input"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -287,7 +287,7 @@ def get( input_name, # type: str **kwargs # type: Any ): - # type: (...) -> "models.Input" + # type: (...) -> "_models.Input" """Gets details about the specified input. :param resource_group_name: The name of the resource group. The name is case insensitive. 
@@ -301,7 +301,7 @@ def get( :rtype: ~stream_analytics_management_client.models.Input :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Input"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Input"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -352,7 +352,7 @@ def list_by_streaming_job( select=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> Iterable["models.InputListResult"] + # type: (...) -> Iterable["_models.InputListResult"] """Lists all of the inputs under the specified streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -368,7 +368,7 @@ def list_by_streaming_job( :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.InputListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.InputListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.InputListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -432,11 +432,11 @@ def _test_initial( resource_group_name, # type: str job_name, # type: str input_name, # type: str - input=None, # type: Optional["models.Input"] + input=None, # type: Optional["_models.Input"] **kwargs # type: Any ): - # type: (...) -> Optional["models.ResourceTestStatus"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ResourceTestStatus"]] + # type: (...) -> Optional["_models.ResourceTestStatus"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ResourceTestStatus"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -493,10 +493,10 @@ def begin_test( resource_group_name, # type: str job_name, # type: str input_name, # type: str - input=None, # type: Optional["models.Input"] + input=None, # type: Optional["_models.Input"] **kwargs # type: Any ): - # type: (...) -> LROPoller["models.ResourceTestStatus"] + # type: (...) -> LROPoller["_models.ResourceTestStatus"] """Tests whether an input’s datasource is reachable and usable by the Azure Stream Analytics service. @@ -514,8 +514,8 @@ def begin_test( :type input: ~stream_analytics_management_client.models.Input :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
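# --- illustrative usage sketch (editorial) ---
# Per the reworded polling docstring above, callers may supply their own
# initialized polling object instead of the default ARMPolling. Assumes the
# `client` from the earlier sketch; `timeout` is ARMPolling's polling interval.
from azure.mgmt.core.polling.arm_polling import ARMPolling

poller = client.inputs.begin_test(
    "my-rg", "my-job", "my-input",
    polling=ARMPolling(timeout=5),  # poll every 5 seconds
)
print(poller.result().status)
# ---------------------------------------------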
:return: An instance of LROPoller that returns either ResourceTestStatus or the result of cls(response) @@ -523,7 +523,7 @@ def begin_test( :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.ResourceTestStatus"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ResourceTestStatus"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -549,7 +549,14 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'inputName': self._serialize.url("input_name", input_name, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_operations.py index 1a63db586859..d1bd5ef89a59 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_operations.py @@ -14,7 +14,7 @@ from azure.core.pipeline.transport import HttpRequest, HttpResponse from azure.mgmt.core.exceptions import ARMErrorFormat -from .. import models +from .. import models as _models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports @@ -37,7 +37,7 @@ class Operations(object): :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer): self._client = client @@ -49,7 +49,7 @@ def list( self, **kwargs # type: Any ): - # type: (...) -> Iterable["models.OperationListResult"] + # type: (...) -> Iterable["_models.OperationListResult"] """Lists all of the available Stream Analytics related operations. 
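# --- illustrative usage sketch (editorial) ---
# Enumerating the provider's available operations through the pageable list
# operation above. Assumes the `client` from the earlier sketch.
for op in client.operations.list():
    print(op.name)
# ---------------------------------------------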
:keyword callable cls: A custom type or function that will be passed the direct response @@ -57,7 +57,7 @@ def list( :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.OperationListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_outputs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_outputs_operations.py index a18f17686979..89b48cfb841e 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_outputs_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_outputs_operations.py @@ -16,7 +16,7 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling -from .. import models +from .. import models as _models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports @@ -39,7 +39,7 @@ class OutputsOperations(object): :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer): self._client = client @@ -52,12 +52,12 @@ def create_or_replace( resource_group_name, # type: str job_name, # type: str output_name, # type: str - output, # type: "models.Output" + output, # type: "_models.Output" if_match=None, # type: Optional[str] if_none_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.Output" + # type: (...) -> "_models.Output" """Creates an output or replaces an already existing output under an existing streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -81,7 +81,7 @@ def create_or_replace( :rtype: ~stream_analytics_management_client.models.Output :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Output"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Output"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -144,11 +144,11 @@ def update( resource_group_name, # type: str job_name, # type: str output_name, # type: str - output, # type: "models.Output" + output, # type: "_models.Output" if_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.Output" + # type: (...) -> "_models.Output" """Updates an existing output under an existing streaming job. This can be used to partially update (ie. update one or two properties) an output without affecting the rest the job or output definition. @@ -173,7 +173,7 @@ def update( :rtype: ~stream_analytics_management_client.models.Output :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Output"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Output"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -289,7 +289,7 @@ def get( output_name, # type: str **kwargs # type: Any ): - # type: (...) -> "models.Output" + # type: (...) -> "_models.Output" """Gets details about the specified output. 
:param resource_group_name: The name of the resource group. The name is case insensitive. @@ -303,7 +303,7 @@ def get( :rtype: ~stream_analytics_management_client.models.Output :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Output"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Output"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -354,7 +354,7 @@ def list_by_streaming_job( select=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> Iterable["models.OutputListResult"] + # type: (...) -> Iterable["_models.OutputListResult"] """Lists all of the outputs under the specified streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -370,7 +370,7 @@ def list_by_streaming_job( :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.OutputListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.OutputListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.OutputListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -434,11 +434,11 @@ def _test_initial( resource_group_name, # type: str job_name, # type: str output_name, # type: str - output=None, # type: Optional["models.Output"] + output=None, # type: Optional["_models.Output"] **kwargs # type: Any ): - # type: (...) -> Optional["models.ResourceTestStatus"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ResourceTestStatus"]] + # type: (...) -> Optional["_models.ResourceTestStatus"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ResourceTestStatus"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -495,10 +495,10 @@ def begin_test( resource_group_name, # type: str job_name, # type: str output_name, # type: str - output=None, # type: Optional["models.Output"] + output=None, # type: Optional["_models.Output"] **kwargs # type: Any ): - # type: (...) -> LROPoller["models.ResourceTestStatus"] + # type: (...) -> LROPoller["_models.ResourceTestStatus"] """Tests whether an output’s datasource is reachable and usable by the Azure Stream Analytics service. @@ -516,8 +516,8 @@ def begin_test( :type output: ~stream_analytics_management_client.models.Output :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either ResourceTestStatus or the result of cls(response) @@ -525,7 +525,7 @@ def begin_test( :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.ResourceTestStatus"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ResourceTestStatus"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -551,7 +551,14 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'outputName': self._serialize.url("output_name", output_name, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_private_endpoints_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_private_endpoints_operations.py index e194d816d90c..b5f49225abea 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_private_endpoints_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_private_endpoints_operations.py @@ -16,7 +16,7 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling -from .. import models +from .. import models as _models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports @@ -39,7 +39,7 @@ class PrivateEndpointsOperations(object): :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer): self._client = client @@ -52,12 +52,12 @@ def create_or_update( resource_group_name, # type: str cluster_name, # type: str private_endpoint_name, # type: str - private_endpoint, # type: "models.PrivateEndpoint" + private_endpoint, # type: "_models.PrivateEndpoint" if_match=None, # type: Optional[str] if_none_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.PrivateEndpoint" + # type: (...) -> "_models.PrivateEndpoint" """Creates a Stream Analytics Private Endpoint or replaces an already existing Private Endpoint. :param resource_group_name: The name of the resource group. The name is case insensitive. 
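# --- illustrative usage sketch (editorial) ---
# polling=False selects NoPolling above, so result() returns the deserialized
# initial response (possibly None for a 202 with no body) instead of waiting
# for a terminal state. Assumes the `client` from the earlier sketch.
initial = client.outputs.begin_test("my-rg", "my-job", "my-output", polling=False).result()
# ---------------------------------------------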
@@ -81,7 +81,7 @@ def create_or_update( :rtype: ~stream_analytics_management_client.models.PrivateEndpoint :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpoint"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpoint"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -122,7 +122,7 @@ def create_or_update( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -144,7 +144,7 @@ def get( private_endpoint_name, # type: str **kwargs # type: Any ): - # type: (...) -> "models.PrivateEndpoint" + # type: (...) -> "_models.PrivateEndpoint" """Gets information about the specified Private Endpoint. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -158,7 +158,7 @@ def get( :rtype: ~stream_analytics_management_client.models.PrivateEndpoint :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpoint"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpoint"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -190,7 +190,7 @@ def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('PrivateEndpoint', pipeline_response) @@ -241,7 +241,7 @@ def _delete_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: @@ -267,8 +267,8 @@ def begin_delete( :type private_endpoint_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) @@ -298,7 +298,14 @@ def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + 'privateEndpointName': self._serialize.url("private_endpoint_name", private_endpoint_name, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -318,7 +325,7 @@ def list_by_cluster( cluster_name, # type: str **kwargs # type: Any ): - # type: (...) -> Iterable["models.PrivateEndpointListResult"] + # type: (...) -> Iterable["_models.PrivateEndpointListResult"] """Lists the private endpoints in the cluster. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -330,7 +337,7 @@ def list_by_cluster( :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.PrivateEndpointListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -377,7 +384,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_streaming_jobs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_streaming_jobs_operations.py index f3228537a7ed..aafbf5663837 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_streaming_jobs_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_streaming_jobs_operations.py @@ -16,7 +16,7 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling -from .. import models +from .. import models as _models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports @@ -39,7 +39,7 @@ class StreamingJobsOperations(object): :param deserializer: An object model deserializer. 
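# --- illustrative usage sketch (editorial) ---
# Listing and deleting cluster private endpoints via the operations above;
# begin_delete resolves to None on success. Assumes the `client` from the
# earlier sketch.
for endpoint in client.private_endpoints.list_by_cluster("my-rg", "my-cluster"):
    print(endpoint.name)
client.private_endpoints.begin_delete("my-rg", "my-cluster", "my-endpoint").result()
# ---------------------------------------------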
""" - models = models + models = _models def __init__(self, client, config, serializer, deserializer): self._client = client @@ -51,13 +51,13 @@ def _create_or_replace_initial( self, resource_group_name, # type: str job_name, # type: str - streaming_job, # type: "models.StreamingJob" + streaming_job, # type: "_models.StreamingJob" if_match=None, # type: Optional[str] if_none_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.StreamingJob" - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJob"] + # type: (...) -> "_models.StreamingJob" + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJob"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -118,12 +118,12 @@ def begin_create_or_replace( self, resource_group_name, # type: str job_name, # type: str - streaming_job, # type: "models.StreamingJob" + streaming_job, # type: "_models.StreamingJob" if_match=None, # type: Optional[str] if_none_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> LROPoller["models.StreamingJob"] + # type: (...) -> LROPoller["_models.StreamingJob"] """Creates a streaming job or replaces an already existing streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -143,8 +143,8 @@ def begin_create_or_replace( :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either StreamingJob or the result of cls(response) @@ -152,7 +152,7 @@ def begin_create_or_replace( :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJob"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJob"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -182,7 +182,13 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, response_headers) return deserialized - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -200,11 +206,11 @@ def update( self, resource_group_name, # type: str job_name, # type: str - streaming_job, # type: "models.StreamingJob" + streaming_job, # type: "_models.StreamingJob" if_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.StreamingJob" + # type: (...) -> "_models.StreamingJob" """Updates an existing streaming job. This can be used to partially update (ie. update one or two properties) a streaming job without affecting the rest the job definition. @@ -226,7 +232,7 @@ def update( :rtype: ~stream_analytics_management_client.models.StreamingJob :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJob"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJob"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -334,8 +340,8 @@ def begin_delete( :type job_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
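# --- illustrative usage sketch (editorial) ---
# Creating a streaming job through the LRO above. A real job also needs a sku,
# inputs, a transformation and outputs; those are elided here, and
# `provisioning_state` is an assumption about this package's StreamingJob model.
from azure.mgmt.streamanalytics.models import StreamingJob

job = StreamingJob(location="West US")
created = client.streaming_jobs.begin_create_or_replace("my-rg", "my-job", job).result()
print(created.provisioning_state)
# ---------------------------------------------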
:return: An instance of LROPoller that returns either None or the result of cls(response) @@ -364,7 +370,13 @@ def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -385,7 +397,7 @@ def get( expand=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.StreamingJob" + # type: (...) -> "_models.StreamingJob" """Gets details about the specified streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -402,7 +414,7 @@ def get( :rtype: ~stream_analytics_management_client.models.StreamingJob :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJob"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJob"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -453,7 +465,7 @@ def list_by_resource_group( expand=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> Iterable["models.StreamingJobListResult"] + # type: (...) -> Iterable["_models.StreamingJobListResult"] """Lists all of the streaming jobs in the specified resource group. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -468,7 +480,7 @@ def list_by_resource_group( :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.StreamingJobListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJobListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJobListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -531,7 +543,7 @@ def list( expand=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> Iterable["models.StreamingJobListResult"] + # type: (...) -> Iterable["_models.StreamingJobListResult"] """Lists all of the streaming jobs in the given subscription. :param expand: The $expand OData query parameter. This is a comma-separated list of additional @@ -544,7 +556,7 @@ def list( :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.StreamingJobListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJobListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJobListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -605,7 +617,7 @@ def _start_initial( self, resource_group_name, # type: str job_name, # type: str - start_job_parameters=None, # type: Optional["models.StartStreamingJobParameters"] + start_job_parameters=None, # type: Optional["_models.StartStreamingJobParameters"] **kwargs # type: Any ): # type: (...) 
-> None @@ -657,7 +669,7 @@ def begin_start( self, resource_group_name, # type: str job_name, # type: str - start_job_parameters=None, # type: Optional["models.StartStreamingJobParameters"] + start_job_parameters=None, # type: Optional["_models.StartStreamingJobParameters"] **kwargs # type: Any ): # type: (...) -> LROPoller[None] @@ -672,8 +684,8 @@ def begin_start( :type start_job_parameters: ~stream_analytics_management_client.models.StartStreamingJobParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) @@ -703,7 +715,13 @@ def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -776,8 +794,8 @@ def begin_stop( :type job_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
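# --- illustrative usage sketch (editorial) ---
# Starting and stopping a job with the LROs above; "JobStartTime" is assumed to
# be a valid OutputStartMode value in this package. Uses the `client` from the
# earlier sketch. Both pollers resolve to None.
from azure.mgmt.streamanalytics.models import StartStreamingJobParameters

client.streaming_jobs.begin_start(
    "my-rg", "my-job",
    StartStreamingJobParameters(output_start_mode="JobStartTime"),
).result()
client.streaming_jobs.begin_stop("my-rg", "my-job").result()
# ---------------------------------------------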
:return: An instance of LROPoller that returns either None or the result of cls(response) @@ -806,7 +824,13 @@ def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_subscriptions_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_subscriptions_operations.py index 81d2bbf70cdb..9a7c858109ec 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_subscriptions_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_subscriptions_operations.py @@ -11,13 +11,15 @@ from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling -from .. import models +from .. import models as _models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Optional, TypeVar + from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -36,7 +38,7 @@ class SubscriptionsOperations(object): :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer): self._client = client @@ -49,19 +51,18 @@ def list_quotas( location, # type: str **kwargs # type: Any ): - # type: (...) -> "models.SubscriptionQuotasListResult" + # type: (...) -> "_models.SubscriptionQuotasListResult" """Retrieves the subscription's current quota information in a particular region. - :param location: The region in which to retrieve the subscription's quota information. You can - find out which regions Azure Stream Analytics is supported in here: - https://azure.microsoft.com/en-us/regions/. + :param location: The region to which the request is sent. You can find out which regions Azure + Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. 
:type location: str :keyword callable cls: A custom type or function that will be passed the direct response :return: SubscriptionQuotasListResult, or the result of cls(response) :rtype: ~stream_analytics_management_client.models.SubscriptionQuotasListResult :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.SubscriptionQuotasListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.SubscriptionQuotasListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -100,3 +101,613 @@ def list_quotas( return deserialized list_quotas.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/quotas'} # type: ignore + + def _test_query_initial( + self, + location, # type: str + streaming_job, # type: "_models.StreamingJob" + **kwargs # type: Any + ): + # type: (...) -> Optional["_models.QueryTestingResult"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.QueryTestingResult"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._test_query_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'location': self._serialize.url("location", location, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(streaming_job, 'StreamingJob') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('QueryTestingResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _test_query_initial.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testQuery'} # type: ignore + + def begin_test_query( + self, + location, # type: str + streaming_job, # type: "_models.StreamingJob" + **kwargs # type: Any + ): + # type: (...) -> LROPoller["_models.QueryTestingResult"] + """Test the Stream Analytics query on a sample input. + + :param location: The region to which the request is sent. 
You can find out which regions Azure + Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. + :type location: str + :param streaming_job: A streaming job object. This object defines the input, output, and + transformation for the query testing. + :type streaming_job: ~stream_analytics_management_client.models.StreamingJob + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns either QueryTestingResult or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~stream_analytics_management_client.models.QueryTestingResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.QueryTestingResult"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._test_query_initial( + location=location, + streaming_job=streaming_job, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('QueryTestingResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'location': self._serialize.url("location", location, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + + if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_test_query.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testQuery'} # type: ignore + + def _compile_query_initial( + self, + location, # type: str + streaming_job, # type: "_models.StreamingJob" + **kwargs # type: Any + ): + # type: (...) 
-> "_models.QueryCompilationResult" + cls = kwargs.pop('cls', None) # type: ClsType["_models.QueryCompilationResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._compile_query_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'location': self._serialize.url("location", location, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(streaming_job, 'StreamingJob') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('QueryCompilationResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _compile_query_initial.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/compileQuery'} # type: ignore + + def begin_compile_query( + self, + location, # type: str + streaming_job, # type: "_models.StreamingJob" + **kwargs # type: Any + ): + # type: (...) -> LROPoller["_models.QueryCompilationResult"] + """Compile the Stream Analytics query. + + :param location: The region to which the request is sent. You can find out which regions Azure + Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. + :type location: str + :param streaming_job: A streaming job object. This object defines the input, output, and + transformation for the query compilation. + :type streaming_job: ~stream_analytics_management_client.models.StreamingJob + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either QueryCompilationResult or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~stream_analytics_management_client.models.QueryCompilationResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.QueryCompilationResult"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._compile_query_initial( + location=location, + streaming_job=streaming_job, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('QueryCompilationResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'location': self._serialize.url("location", location, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + + if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_compile_query.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/compileQuery'} # type: ignore + + def _sample_input_initial( + self, + location, # type: str + sample_input, # type: "_models.SampleInput" + **kwargs # type: Any + ): + # type: (...) 
-> "_models.SampleInputResult" + cls = kwargs.pop('cls', None) # type: ClsType["_models.SampleInputResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._sample_input_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'location': self._serialize.url("location", location, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(sample_input, 'SampleInput') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('SampleInputResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _sample_input_initial.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/sampleInput'} # type: ignore + + def begin_sample_input( + self, + location, # type: str + sample_input, # type: "_models.SampleInput" + **kwargs # type: Any + ): + # type: (...) -> LROPoller["_models.SampleInputResult"] + """Sample the Stream Analytics input data. + + :param location: The region to which the request is sent. You can find out which regions Azure + Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. + :type location: str + :param sample_input: Defines the necessary parameters for sampling the Stream Analytics input + data. + :type sample_input: ~stream_analytics_management_client.models.SampleInput + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either SampleInputResult or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~stream_analytics_management_client.models.SampleInputResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.SampleInputResult"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._sample_input_initial( + location=location, + sample_input=sample_input, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('SampleInputResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'location': self._serialize.url("location", location, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + + if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_sample_input.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/sampleInput'} # type: ignore + + def _test_input_initial( + self, + location, # type: str + test_input, # type: "_models.TestInput" + **kwargs # type: Any + ): + # type: (...) 
-> "_models.TestDatasourceResult" + cls = kwargs.pop('cls', None) # type: ClsType["_models.TestDatasourceResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._test_input_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'location': self._serialize.url("location", location, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(test_input, 'TestInput') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('TestDatasourceResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _test_input_initial.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testInput'} # type: ignore + + def begin_test_input( + self, + location, # type: str + test_input, # type: "_models.TestInput" + **kwargs # type: Any + ): + # type: (...) -> LROPoller["_models.TestDatasourceResult"] + """Test the Stream Analytics input. + + :param location: The region to which the request is sent. You can find out which regions Azure + Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. + :type location: str + :param test_input: Defines the necessary parameters for testing the Stream Analytics input. + :type test_input: ~stream_analytics_management_client.models.TestInput + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either TestDatasourceResult or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~stream_analytics_management_client.models.TestDatasourceResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.TestDatasourceResult"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._test_input_initial( + location=location, + test_input=test_input, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('TestDatasourceResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'location': self._serialize.url("location", location, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + + if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_test_input.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testInput'} # type: ignore + + def _test_output_initial( + self, + location, # type: str + test_output, # type: "_models.TestOutput" + **kwargs # type: Any + ): + # type: (...) 
-> "_models.TestDatasourceResult" + cls = kwargs.pop('cls', None) # type: ClsType["_models.TestDatasourceResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2017-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._test_output_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'location': self._serialize.url("location", location, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(test_output, 'TestOutput') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('TestDatasourceResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _test_output_initial.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testOutput'} # type: ignore + + def begin_test_output( + self, + location, # type: str + test_output, # type: "_models.TestOutput" + **kwargs # type: Any + ): + # type: (...) -> LROPoller["_models.TestDatasourceResult"] + """Test the Stream Analytics output. + + :param location: The region to which the request is sent. You can find out which regions Azure + Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. + :type location: str + :param test_output: Defines the necessary parameters for testing the Stream Analytics output. + :type test_output: ~stream_analytics_management_client.models.TestOutput + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either TestDatasourceResult or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~stream_analytics_management_client.models.TestDatasourceResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.TestDatasourceResult"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._test_output_initial( + location=location, + test_output=test_output, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('TestDatasourceResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'location': self._serialize.url("location", location, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + + if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_test_output.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testOutput'} # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_transformations_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_transformations_operations.py index 49f318e3a748..29a7d238ede5 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_transformations_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_transformations_operations.py @@ -13,7 +13,7 @@ from azure.core.pipeline.transport import HttpRequest, HttpResponse from azure.mgmt.core.exceptions import ARMErrorFormat -from .. import models +from .. import models as _models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports @@ -36,7 +36,7 @@ class TransformationsOperations(object): :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer): self._client = client @@ -49,12 +49,12 @@ def create_or_replace( resource_group_name, # type: str job_name, # type: str transformation_name, # type: str - transformation, # type: "models.Transformation" + transformation, # type: "_models.Transformation" if_match=None, # type: Optional[str] if_none_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.Transformation" + # type: (...) -> "_models.Transformation" """Creates a transformation or replaces an already existing transformation under an existing streaming job. 
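Reviewer note: the hunks above add five location-scoped long-running operations (begin_test_query, begin_compile_query, begin_sample_input, begin_test_input, begin_test_output), all driven by ARMPolling with the 'final-state-via: location' option. Below is a minimal usage sketch, not part of the diff. It assumes the methods are exposed on the subscriptions operation group (as the /subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/... URL template suggests), that azure-identity is installed, and an illustrative, empty StreamingJob payload.

# Hypothetical usage sketch for the begin_* LROs added above.
# Assumptions: the operations hang off client.subscriptions; the
# StreamingJob payload is a placeholder -- a real query test needs
# inputs, a transformation, and outputs populated.
from azure.identity import DefaultAzureCredential
from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient
from azure.mgmt.streamanalytics.models import StreamingJob

client = StreamAnalyticsManagementClient(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",  # placeholder
)

job = StreamingJob()  # illustrative only; populate before a real test

# polling_interval overrides the default wait between polls when the
# service sends no Retry-After header (see the docstrings above).
poller = client.subscriptions.begin_test_query(
    location="westus",
    streaming_job=job,
    polling_interval=10,
)

# A poller can be checkpointed and resumed later; this exercises the
# LROPoller.from_continuation_token branch in the code above.
token = poller.continuation_token()
resumed = client.subscriptions.begin_test_query(
    location="westus",
    streaming_job=job,
    continuation_token=token,
)
result = resumed.result()  # QueryTestingResult on success

Passing polling=False instead returns a poller backed by NoPolling, matching the polling-dispatch branch in each begin_* method above.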
@@ -80,7 +80,7 @@ def create_or_replace(
         :rtype: ~stream_analytics_management_client.models.Transformation
         :raises: ~azure.core.exceptions.HttpResponseError
         """
-        cls = kwargs.pop('cls', None)  # type: ClsType["models.Transformation"]
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Transformation"]
         error_map = {
             401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
         }
@@ -143,11 +143,11 @@ def update(
         resource_group_name,  # type: str
         job_name,  # type: str
         transformation_name,  # type: str
-        transformation,  # type: "models.Transformation"
+        transformation,  # type: "_models.Transformation"
         if_match=None,  # type: Optional[str]
         **kwargs  # type: Any
     ):
-        # type: (...) -> "models.Transformation"
+        # type: (...) -> "_models.Transformation"
         """Updates an existing transformation under an existing streaming job. This can be used to
         partially update (i.e. update one or two properties) a transformation without affecting the
         rest of the job or transformation definition.
@@ -173,7 +173,7 @@ def update(
         :rtype: ~stream_analytics_management_client.models.Transformation
         :raises: ~azure.core.exceptions.HttpResponseError
         """
-        cls = kwargs.pop('cls', None)  # type: ClsType["models.Transformation"]
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Transformation"]
         error_map = {
             401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
         }
@@ -231,7 +231,7 @@ def get(
         transformation_name,  # type: str
         **kwargs  # type: Any
     ):
-        # type: (...) -> "models.Transformation"
+        # type: (...) -> "_models.Transformation"
         """Gets details about the specified transformation.

         :param resource_group_name: The name of the resource group. The name is case insensitive.
@@ -245,7 +245,7 @@
         :rtype: ~stream_analytics_management_client.models.Transformation
         :raises: ~azure.core.exceptions.HttpResponseError
         """
-        cls = kwargs.pop('cls', None)  # type: ClsType["models.Transformation"]
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Transformation"]
         error_map = {
             401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
         }
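Reviewer note: the update() hunks above implement a PATCH-style partial update guarded by the optional if_match ETag. A hedged sketch of that pattern follows, reusing the client from the previous sketch; the resource names are placeholders, and exposing the server ETag as Transformation.etag is an assumption based on the generated models.

# Hypothetical sketch of the partial-update pattern described in the
# update() docstring: read the transformation, patch one property, and
# pin the write to the ETag we read so a concurrent change surfaces as
# 412 Precondition Failed instead of a lost update.
from azure.mgmt.streamanalytics.models import Transformation

current = client.transformations.get(
    resource_group_name="my-rg",   # placeholder
    job_name="my-job",             # placeholder
    transformation_name="Transformation",
)

patch = Transformation(query="SELECT * INTO [output] FROM [input]")

updated = client.transformations.update(
    resource_group_name="my-rg",
    job_name="my-job",
    transformation_name="Transformation",
    transformation=patch,
    if_match=current.etag,  # assumed attribute; omit to overwrite unconditionally
)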