diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/_meta.json b/sdk/loganalytics/azure-mgmt-loganalytics/_meta.json
index 0c370b6b4fd3..0f7d50bbcfb2 100644
--- a/sdk/loganalytics/azure-mgmt-loganalytics/_meta.json
+++ b/sdk/loganalytics/azure-mgmt-loganalytics/_meta.json
@@ -1,8 +1,8 @@
 {
-  "autorest": "3.0.6369",
-  "use": "@autorest/python@5.6.2",
-  "commit": "f5dce125fdad97160346fe3ac32d7d0a49bb11e4",
+  "autorest": "3.3.0",
+  "use": "@autorest/python@5.6.6",
+  "commit": "96e8b312e8e732cd6ce52dae178ff0ae3768f8da",
   "repository_url": "https://github.com/Azure/azure-rest-api-specs",
-  "autorest_command": "autorest specification/operationalinsights/resource-manager/readme.md --multiapi --python --python-mode=update --python-sdks-folder=/home/vsts/work/1/s/azure-sdk-for-python/sdk --track2 --use=@autorest/python@5.6.2 --version=3.0.6369",
+  "autorest_command": "autorest specification/operationalinsights/resource-manager/readme.md --multiapi --python --python-mode=update --python-sdks-folder=/home/vsts/work/1/s/azure-sdk-for-python/sdk --track2 --use=@autorest/python@5.6.6 --version=3.3.0",
   "readme": "specification/operationalinsights/resource-manager/readme.md"
 }
\ No newline at end of file
diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_log_analytics_management_client.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_log_analytics_management_client.py
index b68aa56c4eb7..af6a9be08f04 100644
--- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_log_analytics_management_client.py
+++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_log_analytics_management_client.py
@@ -16,6 +16,7 @@
     from typing import Any, Optional
 
     from azure.core.credentials import TokenCredential
+    from azure.core.pipeline.transport import HttpRequest, HttpResponse
 
 from ._configuration import LogAnalyticsManagementClientConfiguration
 from .operations import DataExportsOperations
@@ -33,9 +34,9 @@
 from .operations import GatewaysOperations
 from .operations import SchemaOperations
 from .operations import WorkspacePurgeOperations
+from .operations import TablesOperations
 from .operations import ClustersOperations
 from .operations import Operations
-from .operations import TablesOperations
 from .operations import WorkspacesOperations
 from .operations import DeletedWorkspacesOperations
 from . import models
@@ -74,12 +75,12 @@ class LogAnalyticsManagementClient(object):
     :vartype schema: azure.mgmt.loganalytics.operations.SchemaOperations
     :ivar workspace_purge: WorkspacePurgeOperations operations
     :vartype workspace_purge: azure.mgmt.loganalytics.operations.WorkspacePurgeOperations
+    :ivar tables: TablesOperations operations
+    :vartype tables: azure.mgmt.loganalytics.operations.TablesOperations
     :ivar clusters: ClustersOperations operations
     :vartype clusters: azure.mgmt.loganalytics.operations.ClustersOperations
     :ivar operations: Operations operations
     :vartype operations: azure.mgmt.loganalytics.operations.Operations
-    :ivar tables: TablesOperations operations
-    :vartype tables: azure.mgmt.loganalytics.operations.TablesOperations
     :ivar workspaces: WorkspacesOperations operations
     :vartype workspaces: azure.mgmt.loganalytics.operations.WorkspacesOperations
     :ivar deleted_workspaces: DeletedWorkspacesOperations operations
@@ -140,17 +141,35 @@ def __init__(
             self._client, self._config, self._serialize, self._deserialize)
         self.workspace_purge = WorkspacePurgeOperations(
             self._client, self._config, self._serialize, self._deserialize)
+        self.tables = TablesOperations(
+            self._client, self._config, self._serialize, self._deserialize)
         self.clusters = ClustersOperations(
             self._client, self._config, self._serialize, self._deserialize)
         self.operations = Operations(
            self._client, self._config, self._serialize, self._deserialize)
-        self.tables = TablesOperations(
-            self._client, self._config, self._serialize, self._deserialize)
         self.workspaces = WorkspacesOperations(
             self._client, self._config, self._serialize, self._deserialize)
         self.deleted_workspaces = DeletedWorkspacesOperations(
             self._client, self._config, self._serialize, self._deserialize)
 
+    def _send_request(self, http_request, **kwargs):
+        # type: (HttpRequest, Any) -> HttpResponse
+        """Runs the network request through the client's chained policies.
+
+        :param http_request: The network request you want to make. Required.
+        :type http_request: ~azure.core.pipeline.transport.HttpRequest
+        :keyword bool stream: Whether the response payload will be streamed. Defaults to True.
+        :return: The response of your network call. Does not do error handling on your response.
+        :rtype: ~azure.core.pipeline.transport.HttpResponse
+        """
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+        }
+        http_request.url = self._client.format_url(http_request.url, **path_format_arguments)
+        stream = kwargs.pop("stream", True)
+        pipeline_response = self._client._pipeline.run(http_request, stream=stream, **kwargs)
+        return pipeline_response.http_response
+
     def close(self):
         # type: () -> None
         self._client.close()
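Reviewer note: the generated `_send_request` hook above pushes a raw `HttpRequest` through the client's policy pipeline, substituting `{subscriptionId}` from the client configuration before sending. A minimal sketch of how it could be exercised — the credential type, subscription ID, and query URL below are illustrative assumptions, not values from this diff:

```python
from azure.core.pipeline.transport import HttpRequest
from azure.identity import DefaultAzureCredential
from azure.mgmt.loganalytics import LogAnalyticsManagementClient

# Placeholder subscription ID; substitute a real one.
client = LogAnalyticsManagementClient(
    credential=DefaultAzureCredential(),
    subscription_id="00000000-0000-0000-0000-000000000000",
)

# {subscriptionId} is filled in by _send_request via path_format_arguments;
# the relative URL is resolved against the client's base_url.
request = HttpRequest(
    "GET",
    "/subscriptions/{subscriptionId}/providers/Microsoft.OperationalInsights/workspaces?api-version=2020-08-01",
)
response = client._send_request(request, stream=False)
print(response.status_code)
```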
diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_metadata.json b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_metadata.json
index de20f0efd33f..92ac1efbbfaf 100644
--- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_metadata.json
+++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_metadata.json
@@ -10,8 +10,8 @@
     "azure_arm": true,
     "has_lro_operations": true,
     "client_side_validation": false,
-    "sync_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials\": [\"TokenCredential\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", \"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"msrest\": [\"Deserializer\", \"Serializer\"], \"azure.mgmt.core\": [\"ARMPipelineClient\"]}, \"local\": {\"._configuration\": [\"LogAnalyticsManagementClientConfiguration\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}}}",
-    "async_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials_async\": [\"AsyncTokenCredential\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", \"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"msrest\": [\"Deserializer\", \"Serializer\"], \"azure.mgmt.core\": [\"AsyncARMPipelineClient\"]}, \"local\": {\"._configuration\": [\"LogAnalyticsManagementClientConfiguration\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}}}"
+    "sync_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials\": [\"TokenCredential\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", \"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"msrest\": [\"Deserializer\", \"Serializer\"], \"azure.mgmt.core\": [\"ARMPipelineClient\"]}, \"local\": {\"._configuration\": [\"LogAnalyticsManagementClientConfiguration\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}, \"azurecore\": {\"azure.core.pipeline.transport\": [\"HttpRequest\", \"HttpResponse\"]}}}",
+    "async_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials_async\": [\"AsyncTokenCredential\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", \"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"msrest\": [\"Deserializer\", \"Serializer\"], \"azure.mgmt.core\": [\"AsyncARMPipelineClient\"]}, \"local\": {\"._configuration\": [\"LogAnalyticsManagementClientConfiguration\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}, \"azurecore\": {\"azure.core.pipeline.transport\": [\"AsyncHttpResponse\", \"HttpRequest\"]}}}"
   },
   "global_parameters": {
     "sync": {
@@ -113,16 +113,10 @@
     "gateways": "GatewaysOperations",
     "schema": "SchemaOperations",
     "workspace_purge": "WorkspacePurgeOperations",
+    "tables": "TablesOperations",
     "clusters": "ClustersOperations",
     "operations": "Operations",
-    "tables": "TablesOperations",
     "workspaces": "WorkspacesOperations",
     "deleted_workspaces": "DeletedWorkspacesOperations"
-  },
-  "operation_mixins": {
-    "sync_imports": "None",
-    "async_imports": "None",
-    "operations": {
-    }
   }
 }
\ No newline at end of file
diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_version.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_version.py
index b77ac9246082..efe7276fe05f 100644
--- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_version.py
+++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_version.py
@@ -6,4 +6,4 @@
 # Changes may cause incorrect behavior and will be lost if the code is regenerated.
 # --------------------------------------------------------------------------
 
-VERSION = "9.0.0"
+VERSION = "7.0.0b1"
diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/_log_analytics_management_client.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/_log_analytics_management_client.py
index 24653bd57baf..b197e604a7f9 100644
--- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/_log_analytics_management_client.py
+++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/_log_analytics_management_client.py
@@ -8,6 +8,7 @@
 
 from typing import Any, Optional, TYPE_CHECKING
 
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
 from azure.mgmt.core import AsyncARMPipelineClient
 from msrest import Deserializer, Serializer
 
@@ -31,9 +32,9 @@
 from .operations import GatewaysOperations
 from .operations import SchemaOperations
 from .operations import WorkspacePurgeOperations
+from .operations import TablesOperations
 from .operations import ClustersOperations
 from .operations import Operations
-from .operations import TablesOperations
 from .operations import WorkspacesOperations
 from .operations import DeletedWorkspacesOperations
 from .. import models
@@ -72,12 +73,12 @@ class LogAnalyticsManagementClient(object):
     :vartype schema: azure.mgmt.loganalytics.aio.operations.SchemaOperations
     :ivar workspace_purge: WorkspacePurgeOperations operations
     :vartype workspace_purge: azure.mgmt.loganalytics.aio.operations.WorkspacePurgeOperations
+    :ivar tables: TablesOperations operations
+    :vartype tables: azure.mgmt.loganalytics.aio.operations.TablesOperations
     :ivar clusters: ClustersOperations operations
     :vartype clusters: azure.mgmt.loganalytics.aio.operations.ClustersOperations
     :ivar operations: Operations operations
     :vartype operations: azure.mgmt.loganalytics.aio.operations.Operations
-    :ivar tables: TablesOperations operations
-    :vartype tables: azure.mgmt.loganalytics.aio.operations.TablesOperations
     :ivar workspaces: WorkspacesOperations operations
     :vartype workspaces: azure.mgmt.loganalytics.aio.operations.WorkspacesOperations
     :ivar deleted_workspaces: DeletedWorkspacesOperations operations
@@ -137,17 +138,34 @@ def __init__(
             self._client, self._config, self._serialize, self._deserialize)
         self.workspace_purge = WorkspacePurgeOperations(
             self._client, self._config, self._serialize, self._deserialize)
+        self.tables = TablesOperations(
+            self._client, self._config, self._serialize, self._deserialize)
         self.clusters = ClustersOperations(
             self._client, self._config, self._serialize, self._deserialize)
         self.operations = Operations(
             self._client, self._config, self._serialize, self._deserialize)
-        self.tables = TablesOperations(
-            self._client, self._config, self._serialize, self._deserialize)
         self.workspaces = WorkspacesOperations(
             self._client, self._config, self._serialize, self._deserialize)
         self.deleted_workspaces = DeletedWorkspacesOperations(
             self._client, self._config, self._serialize, self._deserialize)
 
+    async def _send_request(self, http_request: HttpRequest, **kwargs: Any) -> AsyncHttpResponse:
+        """Runs the network request through the client's chained policies.
+
+        :param http_request: The network request you want to make. Required.
+        :type http_request: ~azure.core.pipeline.transport.HttpRequest
+        :keyword bool stream: Whether the response payload will be streamed. Defaults to True.
+        :return: The response of your network call. Does not do error handling on your response.
+        :rtype: ~azure.core.pipeline.transport.AsyncHttpResponse
+        """
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+        }
+        http_request.url = self._client.format_url(http_request.url, **path_format_arguments)
+        stream = kwargs.pop("stream", True)
+        pipeline_response = await self._client._pipeline.run(http_request, stream=stream, **kwargs)
+        return pipeline_response.http_response
+
     async def close(self) -> None:
         await self._client.close()
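Reviewer note: the aio client gains the same hook, awaiting the async pipeline. A minimal async sketch under the same assumptions (placeholder subscription ID, illustrative URL):

```python
import asyncio

from azure.core.pipeline.transport import HttpRequest
from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient


async def main():
    # Both the credential and the client are async context managers.
    async with DefaultAzureCredential() as credential:
        async with LogAnalyticsManagementClient(
            credential, "00000000-0000-0000-0000-000000000000"
        ) as client:
            request = HttpRequest(
                "GET",
                "/subscriptions/{subscriptionId}/providers/Microsoft.OperationalInsights/workspaces?api-version=2020-08-01",
            )
            response = await client._send_request(request, stream=False)
            print(response.status_code)


asyncio.run(main())
```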
diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/__init__.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/__init__.py
index d7ad4bc230bb..0ef7b90f330d 100644
--- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/__init__.py
+++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/__init__.py
@@ -21,9 +21,9 @@
 from ._gateways_operations import GatewaysOperations
 from ._schema_operations import SchemaOperations
 from ._workspace_purge_operations import WorkspacePurgeOperations
+from ._tables_operations import TablesOperations
 from ._clusters_operations import ClustersOperations
 from ._operations import Operations
-from ._tables_operations import TablesOperations
 from ._workspaces_operations import WorkspacesOperations
 from ._deleted_workspaces_operations import DeletedWorkspacesOperations
 
@@ -43,9 +43,9 @@
     'GatewaysOperations',
     'SchemaOperations',
     'WorkspacePurgeOperations',
+    'TablesOperations',
     'ClustersOperations',
     'Operations',
-    'TablesOperations',
     'WorkspacesOperations',
     'DeletedWorkspacesOperations',
 ]
diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_tables_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_tables_operations.py
index 43eb0bcbaf07..b44c0cc2678e 100644
--- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_tables_operations.py
+++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_tables_operations.py
@@ -63,7 +63,7 @@ def list_by_workspace(
             401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
         }
         error_map.update(kwargs.pop('error_map', {}))
-        api_version = "2020-10-01"
+        api_version = "2020-08-01"
         accept = "application/json"
 
         def prepare_request(next_link=None):
@@ -144,7 +144,7 @@ async def update(
             401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
         }
         error_map.update(kwargs.pop('error_map', {}))
-        api_version = "2020-10-01"
+        api_version = "2020-08-01"
         content_type = kwargs.pop("content_type", "application/json")
         accept = "application/json"
 
@@ -212,7 +212,7 @@ async def get(
             401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
         }
        error_map.update(kwargs.pop('error_map', {}))
-        api_version = "2020-10-01"
+        api_version = "2020-08-01"
         accept = "application/json"
 
         # Construct URL
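Reviewer note: the tables operation group is now pinned to api-version 2020-08-01 and registered alongside the other stable operation groups. A short usage sketch against the sync client from the earlier example — "my-rg" and "my-workspace" are placeholder names:

```python
# Enumerate tables in a workspace and read their per-table retention.
for table in client.tables.list_by_workspace("my-rg", "my-workspace"):
    print(table.name, table.retention_in_days)

# Fetch a single table by name.
heartbeat = client.tables.get("my-rg", "my-workspace", "Heartbeat")
```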
diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/__init__.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/__init__.py
index 913c62408417..f9ecd4b9521f 100644
--- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/__init__.py
+++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/__init__.py
@@ -60,6 +60,7 @@
     from ._models_py3 import UserIdentityProperties
     from ._models_py3 import Workspace
     from ._models_py3 import WorkspaceCapping
+    from ._models_py3 import WorkspaceFeatures
     from ._models_py3 import WorkspaceListManagementGroupsResult
     from ._models_py3 import WorkspaceListResult
     from ._models_py3 import WorkspaceListUsagesResult
@@ -123,6 +124,7 @@
     from ._models import UserIdentityProperties  # type: ignore
     from ._models import Workspace  # type: ignore
     from ._models import WorkspaceCapping  # type: ignore
+    from ._models import WorkspaceFeatures  # type: ignore
     from ._models import WorkspaceListManagementGroupsResult  # type: ignore
     from ._models import WorkspaceListResult  # type: ignore
     from ._models import WorkspaceListUsagesResult  # type: ignore
@@ -206,6 +208,7 @@
     'UserIdentityProperties',
     'Workspace',
     'WorkspaceCapping',
+    'WorkspaceFeatures',
     'WorkspaceListManagementGroupsResult',
     'WorkspaceListResult',
     'WorkspaceListUsagesResult',
diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/_models.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/_models.py
index f53b6a73c3ad..829e93be6f83 100644
--- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/_models.py
+++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/_models.py
@@ -614,7 +614,7 @@ class DataSource(ProxyResource):
     :vartype type: str
     :param properties: Required. The data source properties in raw json format, each kind of data
      source have it's own schema.
-    :type properties: object
+    :type properties: str
     :param etag: The ETag of the data source.
     :type etag: str
     :param kind: Required. The kind of the DataSource. Possible values include: "WindowsEvent",
@@ -645,7 +645,7 @@
         'id': {'key': 'id', 'type': 'str'},
         'name': {'key': 'name', 'type': 'str'},
         'type': {'key': 'type', 'type': 'str'},
-        'properties': {'key': 'properties', 'type': 'object'},
+        'properties': {'key': 'properties', 'type': 'str'},
         'etag': {'key': 'etag', 'type': 'str'},
         'kind': {'key': 'kind', 'type': 'str'},
         'tags': {'key': 'tags', 'type': '{str}'},
@@ -722,7 +722,7 @@ class ErrorAdditionalInfo(msrest.serialization.Model):
     :ivar type: The additional info type.
     :vartype type: str
     :ivar info: The additional info.
-    :vartype info: object
+    :vartype info: str
     """
 
     _validation = {
@@ -732,7 +732,7 @@
 
     _attribute_map = {
         'type': {'key': 'type', 'type': 'str'},
-        'info': {'key': 'info', 'type': 'object'},
+        'info': {'key': 'info', 'type': 'str'},
     }
 
     def __init__(
@@ -1739,25 +1739,16 @@ class Table(ProxyResource):
     :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
      "Microsoft.Storage/storageAccounts".
     :vartype type: str
-    :param retention_in_days: The data table data retention in days, between 7 and 730. Setting
+    :param retention_in_days: The data table data retention in days, between 30 and 730. Setting
      this property to null will default to the workspace retention.
     :type retention_in_days: int
-    :ivar is_troubleshooting_allowed: Specifies if IsTroubleshootingEnabled property can be set for
-     this table.
-    :vartype is_troubleshooting_allowed: bool
-    :param is_troubleshoot_enabled: Enable or disable troubleshoot for this table.
-    :type is_troubleshoot_enabled: bool
-    :ivar last_troubleshoot_date: Last time when troubleshooting was set for this table.
-    :vartype last_troubleshoot_date: str
     """
 
     _validation = {
         'id': {'readonly': True},
         'name': {'readonly': True},
         'type': {'readonly': True},
-        'retention_in_days': {'maximum': 730, 'minimum': 7},
-        'is_troubleshooting_allowed': {'readonly': True},
-        'last_troubleshoot_date': {'readonly': True},
+        'retention_in_days': {'maximum': 730, 'minimum': 30},
     }
 
     _attribute_map = {
@@ -1765,9 +1756,6 @@ class Table(ProxyResource):
         'id': {'key': 'id', 'type': 'str'},
         'name': {'key': 'name', 'type': 'str'},
         'type': {'key': 'type', 'type': 'str'},
         'retention_in_days': {'key': 'properties.retentionInDays', 'type': 'int'},
-        'is_troubleshooting_allowed': {'key': 'properties.isTroubleshootingAllowed', 'type': 'bool'},
-        'is_troubleshoot_enabled': {'key': 'properties.isTroubleshootEnabled', 'type': 'bool'},
-        'last_troubleshoot_date': {'key': 'properties.lastTroubleshootDate', 'type': 'str'},
     }
 
     def __init__(
@@ -1776,9 +1764,6 @@ def __init__(
     ):
         super(Table, self).__init__(**kwargs)
         self.retention_in_days = kwargs.get('retention_in_days', None)
-        self.is_troubleshooting_allowed = None
-        self.is_troubleshoot_enabled = kwargs.get('is_troubleshoot_enabled', None)
-        self.last_troubleshoot_date = None
 
 
 class TablesListResult(msrest.serialization.Model):
@@ -1951,8 +1936,16 @@ class Workspace(TrackedResource):
     :ivar private_link_scoped_resources: List of linked private link scope resources.
     :vartype private_link_scoped_resources:
      list[~azure.mgmt.loganalytics.models.PrivateLinkScopedResource]
-    :param features: Workspace features.
-    :type features: dict[str, object]
+    :param enable_data_export: Flag that indicate if data should be exported.
+    :type enable_data_export: bool
+    :param immediate_purge_data_on30_days: Flag that describes if we want to remove the data after
+     30 days.
+    :type immediate_purge_data_on30_days: bool
+    :param enable_log_access_using_only_resource_permissions: Flag that indicate which permission
+     to use - resource or workspace or both.
+    :type enable_log_access_using_only_resource_permissions: bool
+    :param cluster_resource_id: Dedicated LA cluster resourceId that is linked to the workspaces.
+    :type cluster_resource_id: str
     """
 
     _validation = {
@@ -1984,7 +1977,10 @@
         'public_network_access_for_query': {'key': 'properties.publicNetworkAccessForQuery', 'type': 'str'},
         'force_cmk_for_query': {'key': 'properties.forceCmkForQuery', 'type': 'bool'},
         'private_link_scoped_resources': {'key': 'properties.privateLinkScopedResources', 'type': '[PrivateLinkScopedResource]'},
-        'features': {'key': 'properties.features', 'type': '{object}'},
+        'enable_data_export': {'key': 'properties.features.enableDataExport', 'type': 'bool'},
+        'immediate_purge_data_on30_days': {'key': 'properties.features.immediatePurgeDataOn30Days', 'type': 'bool'},
+        'enable_log_access_using_only_resource_permissions': {'key': 'properties.features.enableLogAccessUsingOnlyResourcePermissions', 'type': 'bool'},
+        'cluster_resource_id': {'key': 'properties.features.clusterResourceId', 'type': 'str'},
     }
 
     def __init__(
@@ -2004,7 +2000,10 @@ def __init__(
         self.public_network_access_for_query = kwargs.get('public_network_access_for_query', "Enabled")
         self.force_cmk_for_query = kwargs.get('force_cmk_for_query', None)
         self.private_link_scoped_resources = None
-        self.features = kwargs.get('features', None)
+        self.enable_data_export = kwargs.get('enable_data_export', None)
+        self.immediate_purge_data_on30_days = kwargs.get('immediate_purge_data_on30_days', None)
+        self.enable_log_access_using_only_resource_permissions = kwargs.get('enable_log_access_using_only_resource_permissions', None)
+        self.cluster_resource_id = kwargs.get('cluster_resource_id', None)
 
 
 class WorkspaceCapping(msrest.serialization.Model):
@@ -2043,6 +2042,44 @@ def __init__(
         self.data_ingestion_status = None
 
 
+class WorkspaceFeatures(msrest.serialization.Model):
+    """Workspace features.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, str]
+    :param enable_data_export: Flag that indicate if data should be exported.
+    :type enable_data_export: bool
+    :param immediate_purge_data_on30_days: Flag that describes if we want to remove the data after
+     30 days.
+    :type immediate_purge_data_on30_days: bool
+    :param enable_log_access_using_only_resource_permissions: Flag that indicate which permission
+     to use - resource or workspace or both.
+    :type enable_log_access_using_only_resource_permissions: bool
+    :param cluster_resource_id: Dedicated LA cluster resourceId that is linked to the workspaces.
+    :type cluster_resource_id: str
+    """
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{str}'},
+        'enable_data_export': {'key': 'enableDataExport', 'type': 'bool'},
+        'immediate_purge_data_on30_days': {'key': 'immediatePurgeDataOn30Days', 'type': 'bool'},
+        'enable_log_access_using_only_resource_permissions': {'key': 'enableLogAccessUsingOnlyResourcePermissions', 'type': 'bool'},
+        'cluster_resource_id': {'key': 'clusterResourceId', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(WorkspaceFeatures, self).__init__(**kwargs)
+        self.additional_properties = kwargs.get('additional_properties', None)
+        self.enable_data_export = kwargs.get('enable_data_export', None)
+        self.immediate_purge_data_on30_days = kwargs.get('immediate_purge_data_on30_days', None)
+        self.enable_log_access_using_only_resource_permissions = kwargs.get('enable_log_access_using_only_resource_permissions', None)
+        self.cluster_resource_id = kwargs.get('cluster_resource_id', None)
+
+
 class WorkspaceListManagementGroupsResult(msrest.serialization.Model):
     """The list workspace management groups operation response.
 
@@ -2148,8 +2185,16 @@ class WorkspacePatch(AzureEntityResource):
     :ivar private_link_scoped_resources: List of linked private link scope resources.
     :vartype private_link_scoped_resources:
      list[~azure.mgmt.loganalytics.models.PrivateLinkScopedResource]
-    :param features: Workspace features.
-    :type features: dict[str, object]
+    :param enable_data_export: Flag that indicate if data should be exported.
+    :type enable_data_export: bool
+    :param immediate_purge_data_on30_days: Flag that describes if we want to remove the data after
+     30 days.
+    :type immediate_purge_data_on30_days: bool
+    :param enable_log_access_using_only_resource_permissions: Flag that indicate which permission
+     to use - resource or workspace or both.
+    :type enable_log_access_using_only_resource_permissions: bool
+    :param cluster_resource_id: Dedicated LA cluster resourceId that is linked to the workspaces.
+    :type cluster_resource_id: str
     """
 
     _validation = {
@@ -2180,7 +2225,10 @@
         'public_network_access_for_query': {'key': 'properties.publicNetworkAccessForQuery', 'type': 'str'},
         'force_cmk_for_query': {'key': 'properties.forceCmkForQuery', 'type': 'bool'},
         'private_link_scoped_resources': {'key': 'properties.privateLinkScopedResources', 'type': '[PrivateLinkScopedResource]'},
-        'features': {'key': 'properties.features', 'type': '{object}'},
+        'enable_data_export': {'key': 'properties.features.enableDataExport', 'type': 'bool'},
+        'immediate_purge_data_on30_days': {'key': 'properties.features.immediatePurgeDataOn30Days', 'type': 'bool'},
+        'enable_log_access_using_only_resource_permissions': {'key': 'properties.features.enableLogAccessUsingOnlyResourcePermissions', 'type': 'bool'},
+        'cluster_resource_id': {'key': 'properties.features.clusterResourceId', 'type': 'str'},
    }
 
     def __init__(
@@ -2200,7 +2248,10 @@ def __init__(
         self.public_network_access_for_query = kwargs.get('public_network_access_for_query', "Enabled")
         self.force_cmk_for_query = kwargs.get('force_cmk_for_query', None)
         self.private_link_scoped_resources = None
-        self.features = kwargs.get('features', None)
+        self.enable_data_export = kwargs.get('enable_data_export', None)
+        self.immediate_purge_data_on30_days = kwargs.get('immediate_purge_data_on30_days', None)
+        self.enable_log_access_using_only_resource_permissions = kwargs.get('enable_log_access_using_only_resource_permissions', None)
+        self.cluster_resource_id = kwargs.get('cluster_resource_id', None)
 
 
 class WorkspacePurgeBody(msrest.serialization.Model):
@@ -2334,23 +2385,18 @@ class WorkspaceSku(msrest.serialization.Model):
     :param capacity_reservation_level: The capacity reservation level for this workspace, when
      CapacityReservation sku is selected.
     :type capacity_reservation_level: int
-    :ivar max_capacity_reservation_level: The maximum capacity reservation level available for this
-     workspace, when CapacityReservation sku is selected.
-    :vartype max_capacity_reservation_level: int
     :ivar last_sku_update: The last time when the sku was updated.
     :vartype last_sku_update: str
     """
 
     _validation = {
         'name': {'required': True},
-        'max_capacity_reservation_level': {'readonly': True},
         'last_sku_update': {'readonly': True},
     }
 
     _attribute_map = {
         'name': {'key': 'name', 'type': 'str'},
         'capacity_reservation_level': {'key': 'capacityReservationLevel', 'type': 'int'},
-        'max_capacity_reservation_level': {'key': 'maxCapacityReservationLevel', 'type': 'int'},
         'last_sku_update': {'key': 'lastSkuUpdate', 'type': 'str'},
     }
 
@@ -2361,5 +2407,4 @@ def __init__(
         super(WorkspaceSku, self).__init__(**kwargs)
         self.name = kwargs['name']
         self.capacity_reservation_level = kwargs.get('capacity_reservation_level', None)
-        self.max_capacity_reservation_level = None
         self.last_sku_update = None
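Reviewer note: the free-form `features: dict[str, object]` bag on `Workspace` and `WorkspacePatch` is replaced by four typed properties that serialize under `properties.features.*`. A sketch of the new calling convention, assuming `begin_create_or_update` is the workspace LRO on this client; the resource group, workspace name, and region are placeholders:

```python
from azure.mgmt.loganalytics.models import Workspace

# Before this change the same flags were passed as an untyped dict:
#   Workspace(location="eastus", features={"enableDataExport": True})
workspace = Workspace(
    location="eastus",
    enable_data_export=True,
    immediate_purge_data_on30_days=True,
    enable_log_access_using_only_resource_permissions=True,
)
poller = client.workspaces.begin_create_or_update("my-rg", "my-workspace", workspace)
result = poller.result()  # block until the LRO completes
print(result.enable_data_export)
```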
diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/_models_py3.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/_models_py3.py
index 01d621fca8a8..d5049ddb3a26 100644
--- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/_models_py3.py
+++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/_models_py3.py
@@ -657,7 +657,7 @@ class DataSource(ProxyResource):
     :vartype type: str
     :param properties: Required. The data source properties in raw json format, each kind of data
      source have it's own schema.
-    :type properties: object
+    :type properties: str
     :param etag: The ETag of the data source.
     :type etag: str
     :param kind: Required. The kind of the DataSource. Possible values include: "WindowsEvent",
@@ -688,7 +688,7 @@
         'id': {'key': 'id', 'type': 'str'},
         'name': {'key': 'name', 'type': 'str'},
         'type': {'key': 'type', 'type': 'str'},
-        'properties': {'key': 'properties', 'type': 'object'},
+        'properties': {'key': 'properties', 'type': 'str'},
         'etag': {'key': 'etag', 'type': 'str'},
         'kind': {'key': 'kind', 'type': 'str'},
         'tags': {'key': 'tags', 'type': '{str}'},
@@ -697,7 +697,7 @@
     def __init__(
         self,
         *,
-        properties: object,
+        properties: str,
         kind: Union[str, "DataSourceKind"],
         etag: Optional[str] = None,
         tags: Optional[Dict[str, str]] = None,
@@ -775,7 +775,7 @@ class ErrorAdditionalInfo(msrest.serialization.Model):
     :ivar type: The additional info type.
     :vartype type: str
     :ivar info: The additional info.
-    :vartype info: object
+    :vartype info: str
     """
 
     _validation = {
@@ -785,7 +785,7 @@
 
     _attribute_map = {
         'type': {'key': 'type', 'type': 'str'},
-        'info': {'key': 'info', 'type': 'object'},
+        'info': {'key': 'info', 'type': 'str'},
     }
 
     def __init__(
@@ -1913,25 +1913,16 @@ class Table(ProxyResource):
     :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
      "Microsoft.Storage/storageAccounts".
     :vartype type: str
-    :param retention_in_days: The data table data retention in days, between 7 and 730. Setting
+    :param retention_in_days: The data table data retention in days, between 30 and 730. Setting
      this property to null will default to the workspace retention.
     :type retention_in_days: int
-    :ivar is_troubleshooting_allowed: Specifies if IsTroubleshootingEnabled property can be set for
-     this table.
-    :vartype is_troubleshooting_allowed: bool
-    :param is_troubleshoot_enabled: Enable or disable troubleshoot for this table.
-    :type is_troubleshoot_enabled: bool
-    :ivar last_troubleshoot_date: Last time when troubleshooting was set for this table.
-    :vartype last_troubleshoot_date: str
     """
 
     _validation = {
         'id': {'readonly': True},
         'name': {'readonly': True},
         'type': {'readonly': True},
-        'retention_in_days': {'maximum': 730, 'minimum': 7},
-        'is_troubleshooting_allowed': {'readonly': True},
-        'last_troubleshoot_date': {'readonly': True},
+        'retention_in_days': {'maximum': 730, 'minimum': 30},
     }
 
     _attribute_map = {
@@ -1939,23 +1930,16 @@ class Table(ProxyResource):
         'id': {'key': 'id', 'type': 'str'},
         'name': {'key': 'name', 'type': 'str'},
         'type': {'key': 'type', 'type': 'str'},
         'retention_in_days': {'key': 'properties.retentionInDays', 'type': 'int'},
-        'is_troubleshooting_allowed': {'key': 'properties.isTroubleshootingAllowed', 'type': 'bool'},
-        'is_troubleshoot_enabled': {'key': 'properties.isTroubleshootEnabled', 'type': 'bool'},
-        'last_troubleshoot_date': {'key': 'properties.lastTroubleshootDate', 'type': 'str'},
     }
 
     def __init__(
         self,
         *,
         retention_in_days: Optional[int] = None,
-        is_troubleshoot_enabled: Optional[bool] = None,
         **kwargs
     ):
         super(Table, self).__init__(**kwargs)
         self.retention_in_days = retention_in_days
-        self.is_troubleshooting_allowed = None
-        self.is_troubleshoot_enabled = is_troubleshoot_enabled
-        self.last_troubleshoot_date = None
 
 
 class TablesListResult(msrest.serialization.Model):
@@ -2140,8 +2124,16 @@ class Workspace(TrackedResource):
     :ivar private_link_scoped_resources: List of linked private link scope resources.
     :vartype private_link_scoped_resources:
      list[~azure.mgmt.loganalytics.models.PrivateLinkScopedResource]
-    :param features: Workspace features.
-    :type features: dict[str, object]
+    :param enable_data_export: Flag that indicate if data should be exported.
+    :type enable_data_export: bool
+    :param immediate_purge_data_on30_days: Flag that describes if we want to remove the data after
+     30 days.
+    :type immediate_purge_data_on30_days: bool
+    :param enable_log_access_using_only_resource_permissions: Flag that indicate which permission
+     to use - resource or workspace or both.
+    :type enable_log_access_using_only_resource_permissions: bool
+    :param cluster_resource_id: Dedicated LA cluster resourceId that is linked to the workspaces.
+    :type cluster_resource_id: str
     """
 
     _validation = {
@@ -2173,7 +2165,10 @@
         'public_network_access_for_query': {'key': 'properties.publicNetworkAccessForQuery', 'type': 'str'},
         'force_cmk_for_query': {'key': 'properties.forceCmkForQuery', 'type': 'bool'},
         'private_link_scoped_resources': {'key': 'properties.privateLinkScopedResources', 'type': '[PrivateLinkScopedResource]'},
-        'features': {'key': 'properties.features', 'type': '{object}'},
+        'enable_data_export': {'key': 'properties.features.enableDataExport', 'type': 'bool'},
+        'immediate_purge_data_on30_days': {'key': 'properties.features.immediatePurgeDataOn30Days', 'type': 'bool'},
+        'enable_log_access_using_only_resource_permissions': {'key': 'properties.features.enableLogAccessUsingOnlyResourcePermissions', 'type': 'bool'},
+        'cluster_resource_id': {'key': 'properties.features.clusterResourceId', 'type': 'str'},
     }
 
     def __init__(
@@ -2189,7 +2184,10 @@ def __init__(
         public_network_access_for_ingestion: Optional[Union[str, "PublicNetworkAccessType"]] = "Enabled",
         public_network_access_for_query: Optional[Union[str, "PublicNetworkAccessType"]] = "Enabled",
         force_cmk_for_query: Optional[bool] = None,
-        features: Optional[Dict[str, object]] = None,
+        enable_data_export: Optional[bool] = None,
+        immediate_purge_data_on30_days: Optional[bool] = None,
+        enable_log_access_using_only_resource_permissions: Optional[bool] = None,
+        cluster_resource_id: Optional[str] = None,
         **kwargs
     ):
         super(Workspace, self).__init__(tags=tags, location=location, **kwargs)
@@ -2205,7 +2203,10 @@ def __init__(
         self.public_network_access_for_query = public_network_access_for_query
         self.force_cmk_for_query = force_cmk_for_query
         self.private_link_scoped_resources = None
-        self.features = features
+        self.enable_data_export = enable_data_export
+        self.immediate_purge_data_on30_days = immediate_purge_data_on30_days
+        self.enable_log_access_using_only_resource_permissions = enable_log_access_using_only_resource_permissions
+        self.cluster_resource_id = cluster_resource_id
 
 
 class WorkspaceCapping(msrest.serialization.Model):
@@ -2246,6 +2247,50 @@ def __init__(
         self.data_ingestion_status = None
 
 
+class WorkspaceFeatures(msrest.serialization.Model):
+    """Workspace features.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, str]
+    :param enable_data_export: Flag that indicate if data should be exported.
+    :type enable_data_export: bool
+    :param immediate_purge_data_on30_days: Flag that describes if we want to remove the data after
+     30 days.
+    :type immediate_purge_data_on30_days: bool
+    :param enable_log_access_using_only_resource_permissions: Flag that indicate which permission
+     to use - resource or workspace or both.
+    :type enable_log_access_using_only_resource_permissions: bool
+    :param cluster_resource_id: Dedicated LA cluster resourceId that is linked to the workspaces.
+    :type cluster_resource_id: str
+    """
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{str}'},
+        'enable_data_export': {'key': 'enableDataExport', 'type': 'bool'},
+        'immediate_purge_data_on30_days': {'key': 'immediatePurgeDataOn30Days', 'type': 'bool'},
+        'enable_log_access_using_only_resource_permissions': {'key': 'enableLogAccessUsingOnlyResourcePermissions', 'type': 'bool'},
+        'cluster_resource_id': {'key': 'clusterResourceId', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        additional_properties: Optional[Dict[str, str]] = None,
+        enable_data_export: Optional[bool] = None,
+        immediate_purge_data_on30_days: Optional[bool] = None,
+        enable_log_access_using_only_resource_permissions: Optional[bool] = None,
+        cluster_resource_id: Optional[str] = None,
+        **kwargs
+    ):
+        super(WorkspaceFeatures, self).__init__(**kwargs)
+        self.additional_properties = additional_properties
+        self.enable_data_export = enable_data_export
+        self.immediate_purge_data_on30_days = immediate_purge_data_on30_days
+        self.enable_log_access_using_only_resource_permissions = enable_log_access_using_only_resource_permissions
+        self.cluster_resource_id = cluster_resource_id
+
+
 class WorkspaceListManagementGroupsResult(msrest.serialization.Model):
     """The list workspace management groups operation response.
 
@@ -2357,8 +2402,16 @@ class WorkspacePatch(AzureEntityResource):
     :ivar private_link_scoped_resources: List of linked private link scope resources.
     :vartype private_link_scoped_resources:
      list[~azure.mgmt.loganalytics.models.PrivateLinkScopedResource]
-    :param features: Workspace features.
-    :type features: dict[str, object]
+    :param enable_data_export: Flag that indicate if data should be exported.
+    :type enable_data_export: bool
+    :param immediate_purge_data_on30_days: Flag that describes if we want to remove the data after
+     30 days.
+    :type immediate_purge_data_on30_days: bool
+    :param enable_log_access_using_only_resource_permissions: Flag that indicate which permission
+     to use - resource or workspace or both.
+    :type enable_log_access_using_only_resource_permissions: bool
+    :param cluster_resource_id: Dedicated LA cluster resourceId that is linked to the workspaces.
+    :type cluster_resource_id: str
     """
 
     _validation = {
@@ -2389,7 +2442,10 @@
         'public_network_access_for_query': {'key': 'properties.publicNetworkAccessForQuery', 'type': 'str'},
         'force_cmk_for_query': {'key': 'properties.forceCmkForQuery', 'type': 'bool'},
         'private_link_scoped_resources': {'key': 'properties.privateLinkScopedResources', 'type': '[PrivateLinkScopedResource]'},
-        'features': {'key': 'properties.features', 'type': '{object}'},
+        'enable_data_export': {'key': 'properties.features.enableDataExport', 'type': 'bool'},
+        'immediate_purge_data_on30_days': {'key': 'properties.features.immediatePurgeDataOn30Days', 'type': 'bool'},
+        'enable_log_access_using_only_resource_permissions': {'key': 'properties.features.enableLogAccessUsingOnlyResourcePermissions', 'type': 'bool'},
+        'cluster_resource_id': {'key': 'properties.features.clusterResourceId', 'type': 'str'},
     }
 
     def __init__(
@@ -2403,7 +2459,10 @@ def __init__(
         public_network_access_for_ingestion: Optional[Union[str, "PublicNetworkAccessType"]] = "Enabled",
         public_network_access_for_query: Optional[Union[str, "PublicNetworkAccessType"]] = "Enabled",
         force_cmk_for_query: Optional[bool] = None,
-        features: Optional[Dict[str, object]] = None,
+        enable_data_export: Optional[bool] = None,
+        immediate_purge_data_on30_days: Optional[bool] = None,
+        enable_log_access_using_only_resource_permissions: Optional[bool] = None,
+        cluster_resource_id: Optional[str] = None,
         **kwargs
     ):
         super(WorkspacePatch, self).__init__(**kwargs)
@@ -2419,7 +2478,10 @@ def __init__(
         self.public_network_access_for_query = public_network_access_for_query
         self.force_cmk_for_query = force_cmk_for_query
         self.private_link_scoped_resources = None
-        self.features = features
+        self.enable_data_export = enable_data_export
+        self.immediate_purge_data_on30_days = immediate_purge_data_on30_days
+        self.enable_log_access_using_only_resource_permissions = enable_log_access_using_only_resource_permissions
+        self.cluster_resource_id = cluster_resource_id
 
 
 class WorkspacePurgeBody(msrest.serialization.Model):
@@ -2565,23 +2627,18 @@ class WorkspaceSku(msrest.serialization.Model):
     :param capacity_reservation_level: The capacity reservation level for this workspace, when
      CapacityReservation sku is selected.
     :type capacity_reservation_level: int
-    :ivar max_capacity_reservation_level: The maximum capacity reservation level available for this
-     workspace, when CapacityReservation sku is selected.
-    :vartype max_capacity_reservation_level: int
     :ivar last_sku_update: The last time when the sku was updated.
     :vartype last_sku_update: str
     """
 
     _validation = {
         'name': {'required': True},
-        'max_capacity_reservation_level': {'readonly': True},
         'last_sku_update': {'readonly': True},
     }
 
     _attribute_map = {
         'name': {'key': 'name', 'type': 'str'},
         'capacity_reservation_level': {'key': 'capacityReservationLevel', 'type': 'int'},
-        'max_capacity_reservation_level': {'key': 'maxCapacityReservationLevel', 'type': 'int'},
        'last_sku_update': {'key': 'lastSkuUpdate', 'type': 'str'},
     }
 
@@ -2595,5 +2652,4 @@ def __init__(
         super(WorkspaceSku, self).__init__(**kwargs)
         self.name = name
         self.capacity_reservation_level = capacity_reservation_level
-        self.max_capacity_reservation_level = None
         self.last_sku_update = None
diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/__init__.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/__init__.py
index d7ad4bc230bb..0ef7b90f330d 100644
--- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/__init__.py
+++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/__init__.py
@@ -21,9 +21,9 @@
 from ._gateways_operations import GatewaysOperations
 from ._schema_operations import SchemaOperations
 from ._workspace_purge_operations import WorkspacePurgeOperations
+from ._tables_operations import TablesOperations
 from ._clusters_operations import ClustersOperations
 from ._operations import Operations
-from ._tables_operations import TablesOperations
 from ._workspaces_operations import WorkspacesOperations
 from ._deleted_workspaces_operations import DeletedWorkspacesOperations
 
@@ -43,9 +43,9 @@
     'GatewaysOperations',
     'SchemaOperations',
     'WorkspacePurgeOperations',
+    'TablesOperations',
     'ClustersOperations',
     'Operations',
-    'TablesOperations',
     'WorkspacesOperations',
     'DeletedWorkspacesOperations',
 ]
diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_tables_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_tables_operations.py
index 48ef27022d7d..fcee80468d10 100644
--- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_tables_operations.py
+++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_tables_operations.py
@@ -68,7 +68,7 @@ def list_by_workspace(
             401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
         }
         error_map.update(kwargs.pop('error_map', {}))
-        api_version = "2020-10-01"
+        api_version = "2020-08-01"
         accept = "application/json"
 
         def prepare_request(next_link=None):
@@ -150,7 +150,7 @@ def update(
             401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
         }
         error_map.update(kwargs.pop('error_map', {}))
-        api_version = "2020-10-01"
+        api_version = "2020-08-01"
         content_type = kwargs.pop("content_type", "application/json")
         accept = "application/json"
 
@@ -219,7 +219,7 @@ def get(
             401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
         }
         error_map.update(kwargs.pop('error_map', {}))
-        api_version = "2020-10-01"
+        api_version = "2020-08-01"
         accept = "application/json"
 
         # Construct URL
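Reviewer note: with the `Table.retention_in_days` bounds tightened from 7-730 to 30-730 days (and the serializer's minimum now 30), a retention value below 30 that previously round-tripped will now violate the model's validation metadata. A minimal sketch of a compliant table update using the sync client from the first example; names are placeholders:

```python
from azure.mgmt.loganalytics.models import Table

# 90 sits inside the new 30-730 day window; 7 was valid before this change
# but now falls under the minimum.
updated = client.tables.update(
    "my-rg",
    "my-workspace",
    "Heartbeat",
    Table(retention_in_days=90),
)
print(updated.retention_in_days)
```

Note that `_metadata.json` keeps `"client_side_validation": false`, so in practice enforcement of the 30-day floor is likely to come from the service rather than the SDK.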