diff --git a/doc/dev/README.md b/doc/dev/README.md index 6b3e6ce09fed..02185160ea01 100644 --- a/doc/dev/README.md +++ b/doc/dev/README.md @@ -6,7 +6,7 @@ Overview of the documents: - [Developer Set-Up](https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/dev_setup.md) : How to create a development environment for this repo - [Release](https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/release.md) : How to release a package when ready - [Packaging](https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/packaging.md) : How to organize packaging information for packages under `azure` -- [Testing](./tests.md): How to write unit and functional tests for a library +- [Testing](https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/tests.md): How to write unit and functional tests for a library - [Docstrings and Type hints](https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/docstring_typehint.md): How to document an SDK for compatability with internal tools (API View) and our documentation at [MS Docs][ms_docs] and the [azure.github.io][azure_github_io] site. The [mgmt](https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/mgmt) folder contains information specific to management packages (i.e. packages prefixed by `azure-mgmt`) diff --git a/doc/dev/docstring_typehint.md b/doc/dev/docstring_typehint.md index 2f8e17f8e253..0df46f8b3345 100644 --- a/doc/dev/docstring_typehint.md +++ b/doc/dev/docstring_typehint.md @@ -4,7 +4,7 @@ All public methods should have docstrings to document the parameters, keywords, * [Docstrings](#docstrings) * [Method Docstrings](#method_docstrings) - * [Model and Client Docstrings](#Model_and_Client_Docstrings) + * [Model and Client Docstrings](#model_and_Client_Docstrings) * [Type Hints](#type_hints) * [Type Hints for Python 2.7 and 3.5+](#type_hints_for_python_2.7_and_3.5+) * [Type Hints for Python 3.5+](#type_hints_for_python_3.5+) diff --git a/doc/dev/test_proxy_migration_guide.md b/doc/dev/test_proxy_migration_guide.md new file mode 100644 index 000000000000..ff31703912b9 --- /dev/null +++ b/doc/dev/test_proxy_migration_guide.md @@ -0,0 +1,176 @@ +# Guide for migrating to the test proxy from vcrpy + +This guide describes the changes that service SDKs should make to their test frameworks in order to take advantage of +the Azure SDK test proxy. + +Documentation of the motivations and goals of the test proxy can be found [here][general_docs] in the azure-sdk-tools +GitHub repository, and documentation of how to set up and use the proxy can be found [here][detailed_docs]. + +## Update existing tests + +### Current test structure + +Test classes currently inherit from AzureTestCase, and test methods can optionally use decorators: + +```py +from devtools_testutils import AzureTestCase + +class TestExample(AzureTestCase): + + def test_example(self): + ... + + @ExamplePreparer() + def test_example_with_preparer(self): + ... +``` + +### New test structure + +To use the proxy, test classes should inherit from AzureRecordedTestCase and recorded test methods should use a +RecordedByProxy decorator: + +```py +from devtools_testutils import AzureRecordedTestCase, RecordedByProxy + +class TestExample(AzureRecordedTestCase): + + @RecordedByProxy + def test_example(self): + ... + + @ExamplePreparer() + @RecordedByProxy + def test_example_with_preparer(self): + ... +``` + +For async tests, import the RecordedByProxyAsync decorator from `devtools_testutils.aio` and use it in the same +way as RecordedByProxy. 
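For instance, a recorded async test might look like the following minimal sketch (the class and method names here are
placeholders, not APIs from the repository):

```py
from devtools_testutils import AzureRecordedTestCase
from devtools_testutils.aio import RecordedByProxyAsync

class TestExampleAsync(AzureRecordedTestCase):

    @RecordedByProxyAsync
    async def test_example_async(self):
        ...
```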
+ +> **Note:** since AzureRecordedTestCase doesn't inherit from `unittest.TestCase`, test class names need to start +> with "Test" in order to be properly collected by pytest by default. For more information, please refer to +> [pytest's documentation][pytest_collection]. + +## Run the tests + +### Perform one-time setup + +The test proxy is made available for your tests via a Docker container. Some tests require an SSL connection to work, so +the Docker image used for the container has a certificate imported that you need to trust on your machine. Instructions +on how to do so can be found [here][proxy_cert_docs]. + +### Start the proxy server + +There is a [PowerShell script][docker_start_proxy] in `eng/common/testproxy` that will fetch the proxy Docker image if +you don't already have it, and will start or stop a container running the image for you. You can run the following +command from the root of the `azure-sdk-for-python` directory to start the container whenever you want to make the test +proxy available for running tests: + +```powershell +.\eng\common\testproxy\docker-start-proxy.ps1 "start" +``` + +Note that the proxy is available as long as the container is running. In other words, you don't need to start and +stop the container for each test run or between tests for different SDKs. You can run the above command in the morning +and just stop the container whenever you'd like. To stop the container, run the same command but with `"stop"` in place +of `"start"`. In the future, the proxy container will be set up and started automatically when tests are run, and +starting it manually will be optional. + +For more details on proxy startup, please refer to the [proxy documentation][detailed_docs]. + +### Record or play back tests + +Configuring live and playback tests is done with the `AZURE_TEST_RUN_LIVE` environment variable. When this variable is +set to "true" or "yes", live tests will run and produce recordings. When this variable is set to "false" or "no", or +not set at all, tests will run in playback mode and attempt to match existing recordings. + +Recordings for a given package will end up in that package's `/tests/recordings` directory, just like they currently +do. + +> **Note:** at this time, support for configuring live or playback tests with a `testsettings_local.cfg` file has been +> deprecated in favor of using just `AZURE_TEST_RUN_LIVE`. + +### Register sanitizers + +Since the test proxy doesn't use [`vcrpy`][vcrpy], tests don't use a scrubber to sanitize values in recordings. +Instead, sanitizers (as well as matchers and transforms) can be registered on the proxy as detailed in +[this][sanitizers] section of the proxy documentation. At the time of writing, sanitizers can be registered via the +`add_sanitizer` method in `devtools_testutils`. + +Sanitizers, matchers, and transforms remain registered until the proxy container is stopped, so for any sanitizers that +are shared by different tests, using a session fixture declared in a `conftest.py` file is recommended. Please refer to +[pytest's scoped fixture documentation][pytest_fixtures] for more details. 
+
For example, to sanitize URIs in recordings, you can set up a URI sanitizer for all tests in the pytest session by
adding something like the following in the package's `conftest.py` file:

```python
import pytest

from devtools_testutils import ProxyRecordingSanitizer, add_sanitizer

# autouse=True applies the fixture to every test in the session without each test requesting it
@pytest.fixture(scope="session", autouse=True)
def sanitize_uris():
    add_sanitizer(ProxyRecordingSanitizer.URI, value="fakeendpoint")
```

`add_sanitizer` accepts a sanitizer, matcher, or transform type from the ProxyRecordingSanitizer enum as a required
parameter. Keyword-only arguments can be provided to customize the sanitizer; for example, in the snippet above, any
request URIs that match the default URI regular expression will have their domain name replaced with "fakeendpoint". A
request made to `https://tableaccount.table.core.windows.net` will be recorded as being made to
`https://fakeendpoint.table.core.windows.net`.

## Implementation details

### What does the test proxy do?

The gist of the test proxy is that it stands in between your tests and the service. What this means is that test
requests which would usually go straight to the service should instead point to the locally-hosted test proxy.

For example, if an operation would typically make a GET request to
`https://fakeazsdktestaccount.table.core.windows.net/Tables`, that operation should now be sent to
`https://localhost:5001/Tables` instead. The original endpoint should be stored in an `x-recording-upstream-base-uri`
header -- the proxy will send the original request and record the result.

The RecordedByProxy and RecordedByProxyAsync decorators patch test requests to do this for you.

### How does the test proxy know when and what to record or play back?

This is achieved by making POST requests to the proxy server that say whether to start or stop recording or playing
back, as well as what test is being run.

To start recording a test, the server should be primed with a POST request:

```
URL: https://localhost:5001/record/start
headers {
    "x-recording-file": "/recordings/."
}
```

This will return a recording ID in an `x-recording-id` header. This ID should be sent as an `x-recording-id` header in
all further requests during the test.

After the test has finished, a POST request should be sent to indicate that recording is complete:

```
URL: https://localhost:5001/record/stop
headers {
    "x-recording-id": ""
}
```

Running tests in playback follows the same pattern, except that requests will be sent to `/playback/start` and
`/playback/stop` instead. A header, `x-recording-mode`, should be set to `record` for all requests when recording and
`playback` when playing recordings back. More details can be found [here][detailed_docs].

The RecordedByProxy and RecordedByProxyAsync decorators send the appropriate requests at the start and end of each test
case.
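As a rough illustration of the handshake described above (this is only a sketch of the protocol, not the decorators'
actual implementation, and the helper names are hypothetical), the record endpoints could be driven with plain HTTP
calls:

```python
import requests

PROXY_URL = "https://localhost:5001"

def start_recording(recording_file: str) -> str:
    # Prime the proxy to start recording; the response carries the recording ID.
    response = requests.post(
        f"{PROXY_URL}/record/start",
        headers={"x-recording-file": recording_file},
        verify=False,  # the local proxy presents a development certificate
    )
    return response.headers["x-recording-id"]

def stop_recording(recording_id: str) -> None:
    # Signal that the test is finished so the proxy can persist the recording.
    requests.post(
        f"{PROXY_URL}/record/stop",
        headers={"x-recording-id": recording_id},
        verify=False,
    )
```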
+ +[detailed_docs]: https://github.com/Azure/azure-sdk-tools/tree/main/tools/test-proxy/Azure.Sdk.Tools.TestProxy/README.md +[docker_start_proxy]: https://github.com/Azure/azure-sdk-for-python/blob/main/eng/common/testproxy/docker-start-proxy.ps1 +[general_docs]: https://github.com/Azure/azure-sdk-tools/blob/main/tools/test-proxy/README.md +[proxy_cert_docs]: https://github.com/Azure/azure-sdk-tools/blob/main/tools/test-proxy/documentation/trusting-cert-per-language.md +[pytest_collection]: https://docs.pytest.org/latest/goodpractices.html#test-discovery +[pytest_fixtures]: https://docs.pytest.org/latest/fixture.html#scope-sharing-fixtures-across-classes-modules-packages-or-session +[sanitizers]: https://github.com/Azure/azure-sdk-tools/blob/main/tools/test-proxy/Azure.Sdk.Tools.TestProxy/README.md#session-and-test-level-transforms-sanitiziers-and-matchers +[vcrpy]: https://vcrpy.readthedocs.io diff --git a/eng/common/TestResources/New-TestResources.ps1 b/eng/common/TestResources/New-TestResources.ps1 index 508b9b693976..efb4796bc521 100644 --- a/eng/common/TestResources/New-TestResources.ps1 +++ b/eng/common/TestResources/New-TestResources.ps1 @@ -36,7 +36,7 @@ param ( [ValidateNotNullOrEmpty()] [string] $TenantId, - # Azure SDK Developer Playground subscription + # Azure SDK Developer Playground subscription is assumed if not set [Parameter()] [ValidatePattern('^[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}$')] [string] $SubscriptionId, @@ -279,46 +279,13 @@ try { if (!$TenantId) { $TenantId = $context.Subscription.TenantId } - - # If no test application ID is specified during an interactive session, create a new service principal. - if (!$TestApplicationId) { - - # Cache the created service principal in this session for frequent reuse. - $servicePrincipal = if ($AzureTestPrincipal -and (Get-AzADServicePrincipal -ApplicationId $AzureTestPrincipal.ApplicationId) -and $AzureTestSubscription -eq $SubscriptionId) { - Log "TestApplicationId was not specified; loading cached service principal '$($AzureTestPrincipal.ApplicationId)'" - $AzureTestPrincipal - } else { - Log "TestApplicationId was not specified; creating a new service principal in subscription '$SubscriptionId'" - $suffix = (New-Guid).ToString('n').Substring(0, 4) - $global:AzureTestPrincipal = New-AzADServicePrincipal -Role Owner -Scope "/subscriptions/$SubscriptionId" -DisplayName "test-resources-$($baseName)$suffix.microsoft.com" - $global:AzureTestSubscription = $SubscriptionId - - Log "Created service principal '$($AzureTestPrincipal.ApplicationId)'" - $AzureTestPrincipal - } - - $TestApplicationId = $servicePrincipal.ApplicationId - $TestApplicationSecret = (ConvertFrom-SecureString $servicePrincipal.Secret -AsPlainText); - - # Make sure pre- and post-scripts are passed formerly required arguments. - $PSBoundParameters['TestApplicationId'] = $TestApplicationId - $PSBoundParameters['TestApplicationSecret'] = $TestApplicationSecret - } - - if (!$ProvisionerApplicationId) { - $ProvisionerApplicationId = $TestApplicationId - $ProvisionerApplicationSecret = $TestApplicationSecret - $TenantId = $context.Tenant.Id - } } - # Log in as and run pre- and post-scripts as the provisioner service principal. + # If a provisioner service principal was provided, log into it to perform the pre- and post-scripts and deployments. if ($ProvisionerApplicationId) { $null = Disable-AzContextAutosave -Scope Process Log "Logging into service principal '$ProvisionerApplicationId'." 
- Write-Warning 'Logging into service principal may fail until the principal is fully propagated.' - $provisionerSecret = ConvertTo-SecureString -String $ProvisionerApplicationSecret -AsPlainText -Force $provisionerCredential = [System.Management.Automation.PSCredential]::new($ProvisionerApplicationId, $provisionerSecret) @@ -343,20 +310,25 @@ try { } } - # Get test application OID from ID if not already provided. - if ($TestApplicationId -and !$TestApplicationOid) { - $testServicePrincipal = Retry { - Get-AzADServicePrincipal -ApplicationId $TestApplicationId - } + # Determine the Azure context that the script is running in. + $context = Get-AzContext; - if ($testServicePrincipal -and $testServicePrincipal.Id) { - $script:TestApplicationOid = $testServicePrincipal.Id + # Make sure the provisioner OID is set so we can pass it through to the deployment. + $provisionerApplicationOid = if (!$ProvisionerApplicationId) { + if ($context.Account.Type -eq 'User') { + $user = Get-AzADUser -UserPrincipalName $context.Account.Id + $user.Id + } elseif ($context.Account.Type -eq 'ServicePrincipal') { + $sp = Get-AzADServicePrincipal -ApplicationId $context.Account.Id + $sp.Id + } else { + Write-Warning "Getting the OID for provisioner type '$($context.Account.Type)' is not supported and will not be passed to deployments (seldom required)." } + } else { + $sp = Get-AzADServicePrincipal -ApplicationId $ProvisionerApplicationId + $sp.Id } - # Determine the Azure context that the script is running in. - $context = Get-AzContext; - # If the ServiceDirectory is an absolute path use the last directory name # (e.g. D:\foo\bar\ -> bar) $serviceName = if (Split-Path -IsAbsolute $ServiceDirectory) { @@ -426,11 +398,88 @@ try { } } + # If no test application ID was specified during an interactive session, create a new service principal. + if (!$CI -and !$TestApplicationId) { + # Cache the created service principal in this session for frequent reuse. + $servicePrincipal = if ($AzureTestPrincipal -and (Get-AzADServicePrincipal -ApplicationId $AzureTestPrincipal.ApplicationId) -and $AzureTestSubscription -eq $SubscriptionId) { + Log "TestApplicationId was not specified; loading cached service principal '$($AzureTestPrincipal.ApplicationId)'" + $AzureTestPrincipal + } else { + Log "TestApplicationId was not specified; creating a new service principal in subscription '$SubscriptionId'" + $suffix = (New-Guid).ToString('n').Substring(0, 4) + + # Service principals in the Microsoft AAD tenant must end with an @microsoft.com domain; those in other tenants + # are not permitted to do so. Format the non-MS name so there's an assocation with the Azure SDK. + if ($TenantId -eq '72f988bf-86f1-41af-91ab-2d7cd011db47') { + $displayName = "test-resources-$($baseName)$suffix.microsoft.com" + } else { + $displayName = "$($baseName)$suffix.test-resources.azure.sdk" + } + + $servicePrincipal = Retry { + New-AzADServicePrincipal -Role "Owner" -Scope "/subscriptions/$SubscriptionId/resourceGroups/$ResourceGroupName" -DisplayName $displayName + } + + $global:AzureTestPrincipal = $servicePrincipal + $global:AzureTestSubscription = $SubscriptionId + + Log "Created service principal '$($AzureTestPrincipal.ApplicationId)'" + $AzureTestPrincipal + $resourceGroupRoleAssigned = $true + } + + $TestApplicationId = $servicePrincipal.ApplicationId + $TestApplicationOid = $servicePrincipal.Id + $TestApplicationSecret = (ConvertFrom-SecureString $servicePrincipal.Secret -AsPlainText) + } + + # Get test application OID from ID if not already provided. 
This may fail if the
+    # provisioner is a service principal without permissions to query AAD. This is a
+    # critical failure, but we should prompt with possible remediation.
+    if ($TestApplicationId -and !$TestApplicationOid) {
+        Log "Attempting to query the Object ID for the test service principal"
+
+        try {
+            $testServicePrincipal = Retry {
+                Get-AzADServicePrincipal -ApplicationId $TestApplicationId
+            }
+        }
+        catch {
+            Write-Warning "The Object ID of the test application could not be queried. You may want to consider passing it explicitly with the 'TestApplicationOid' parameter."
+            throw $_.Exception
+        }
+
+        if ($testServicePrincipal -and $testServicePrincipal.Id) {
+            $script:TestApplicationOid = $testServicePrincipal.Id
+        }
+    }
+
+    # Make sure pre- and post-scripts are passed formerly required arguments.
+    $PSBoundParameters['TestApplicationId'] = $TestApplicationId
+    $PSBoundParameters['TestApplicationOid'] = $TestApplicationOid
+    $PSBoundParameters['TestApplicationSecret'] = $TestApplicationSecret
+
+    # Grant the test service principal ownership over the resource group. This may fail if the provisioner is a
+    # service principal without permissions to grant RBAC roles to other service principals. That should not be
+    # considered a critical failure, as the test application may have subscription-level permissions and not require
+    # the explicit grant.
+    if (!$resourceGroupRoleAssigned) {
+        Log "Attempting to assign the 'Owner' role for '$ResourceGroupName' to the Test Application '$TestApplicationId'"
+        $principalOwnerAssignment = New-AzRoleAssignment -RoleDefinitionName "Owner" -ApplicationId "$TestApplicationId" -ResourceGroupName "$ResourceGroupName" -ErrorAction SilentlyContinue
+
+        if ($principalOwnerAssignment.RoleDefinitionName -eq 'Owner') {
+            Write-Verbose "Successfully assigned ownership of '$ResourceGroupName' to the Test Application '$TestApplicationId'"
+        } else {
+            Write-Warning "The 'Owner' role for '$ResourceGroupName' could not be assigned. You may need to manually grant 'Owner' for the resource group to the Test Application '$TestApplicationId' if it does not have subscription-level permissions."
+        }
+    }
+
     # Populate the template parameters and merge any additional specified.
     $templateParameters = @{
         baseName = $BaseName
         testApplicationId = $TestApplicationId
         testApplicationOid = "$TestApplicationOid"
+        provisionerApplicationOid = "$provisionerApplicationOid"
     }

     if ($TenantId) {
@@ -484,7 +533,7 @@ try {
             "Deployment template $($templateFile.jsonFilePath) to resource group $($resourceGroup.ResourceGroupName)"
     }
     Log $msg
-
+
     $deployment = Retry {
         $lastDebugPreference = $DebugPreference
         try {
@@ -609,22 +658,23 @@ if ($CI) {
Deploys live test resources defined for a service directory to Azure.

.DESCRIPTION
-Deploys live test resouces specified in test-resources.json files to a resource
-group.
+Deploys live test resources specified in test-resources.json or test-resources.bicep
+files to a new resource group.

This script searches the directory specified in $ServiceDirectory recursively
-for files named test-resources.json. All found test-resources.json files will be
-deployed to the test resource group.
+for files named test-resources.json or test-resources.bicep. All found test-resources.json
+and test-resources.bicep files will be deployed to the test resource group.

-If no test-resources.json files are located the script exits without making
-changes to the Azure environment.
+If no test-resources.json or test-resources.bicep files are located the script +exits without making changes to the Azure environment. -A service principal must first be created before this script is run and passed -to $TestApplicationId and $TestApplicationSecret. Test resources will grant this -service principal access. +A service principal may optionally be passed to $TestApplicationId and $TestApplicationSecret. +Test resources will grant this service principal access to the created resources. +If no service principal is specified, a new one will be created and assigned the +'Owner' role for the resource group associated with the test resources. -This script uses credentials already specified in Connect-AzAccount or those -specified in $ProvisionerApplicationId and $ProvisionerApplicationSecret. +This script runs in the context of credentials already specified in Connect-AzAccount +or those specified in $ProvisionerApplicationId and $ProvisionerApplicationSecret. .PARAMETER BaseName A name to use in the resource group and passed to the ARM template as 'baseName'. @@ -636,16 +686,28 @@ by New-TestResources.ps1 if $CI is specified. .PARAMETER ResourceGroupName Set this value to deploy directly to a Resource Group that has already been -created. +created or to create a new resource group with this name. + +If not specified, the $BaseName will be used to generate name for the resource +group that will be created. .PARAMETER ServiceDirectory A directory under 'sdk' in the repository root - optionally with subdirectories -specified - in which to discover ARM templates named 'test-resources.json'. -This can also be an absolute path or specify parent directories. +specified - in which to discover ARM templates named 'test-resources.json' and +Bicep templates named 'test-resources.bicep'. This can also be an absolute path +or specify parent directories. .PARAMETER TestApplicationId -The AAD Application ID to authenticate the test runner against deployed -resources. Passed to the ARM template as 'testApplicationId'. +Optional Azure Active Directory Application ID to authenticate the test runner +against deployed resources. Passed to the ARM template as 'testApplicationId'. + +If not specified, a new AAD Application will be created and assigned the 'Owner' +role for the resource group associated with the test resources. No permissions +will be granted to the subscription or other resources. + +For those specifying a Provisioner Application principal as 'ProvisionerApplicationId', +it will need the permission 'Application.ReadWrite.OwnedBy' for the Microsoft Graph API +in order to create the Test Application principal. This application is used by the test runner to execute tests against the live test resources. @@ -659,18 +721,24 @@ This application is used by the test runner to execute tests against the live test resources. .PARAMETER TestApplicationOid -Service Principal Object ID of the AAD Test application. This is used to assign +Service Principal Object ID of the AAD Test Application. This is used to assign permissions to the AAD application so it can access tested features on the live test resources (e.g. Role Assignments on resources). It is passed as to the ARM template as 'testApplicationOid' +If not specified, an attempt will be made to query it from the Azure Active Directory +tenant. For those specifying a service principal as 'ProvisionerApplicationId', +it will need the permission 'Application.Read.All' for the Microsoft Graph API +in order to query AAD. 
+
For more information on the relationship between AAD Applications and Service
Principals see: https://docs.microsoft.com/azure/active-directory/develop/app-objects-and-service-principals

.PARAMETER TenantId
The tenant ID of a service principal when a provisioner is specified. The same
-Tenant ID is used for Test Application and Provisioner Application. This value
-is passed to the ARM template as 'tenantId'.
+Tenant ID is used for Test Application and Provisioner Application.
+
+This value is passed to the ARM template as 'tenantId'.

.PARAMETER SubscriptionId
Optional subscription ID to use for new resources when logging in as a
provisioner. You can also use Set-AzContext if not provisioning.

Once you are logged in (or were previously), the selected SubscriptionId
will be used for subsequent operations that are specific to a subscription.

.PARAMETER ProvisionerApplicationId
-The AAD Application ID used to provision test resources when a provisioner is
-specified.
+Optional Application ID of the Azure Active Directory service principal to use for
+provisioning the test resources. If not specified, New-TestResources.ps1 uses the
+context of the caller to provision.
+
+If specified, the Provisioner Application principal would benefit from the following
+permissions to the Microsoft Graph API:
+
+  - 'Application.Read.All' in order to query AAD to obtain the 'TestApplicationOid'
+
+  - 'Application.ReadWrite.OwnedBy' in order to create the Test Application principal
+    or grant an existing principal ownership of the resource group associated with
+    the test resources.

-If none is specified New-TestResources.ps1 uses the TestApplicationId.
+If the provisioner does not have these permissions, it can still be used with
+New-TestResources.ps1 by specifying an existing Test Application principal, including
+its Object ID, and managing permissions to the resource group manually.

This value is not passed to the ARM template.

@@ -694,8 +774,6 @@ This value is not passed to the ARM template.
A service principal secret (password) used to provision test resources when a
provisioner is specified.

-If none is specified New-TestResources.ps1 uses the TestApplicationSecret.
-
This value is not passed to the ARM template.

.PARAMETER DeleteAfterHours
@@ -718,7 +796,7 @@ is based on the cloud to which the template is being deployed:
* Dogfood -> 'westus'

.PARAMETER Environment
-Name of the cloud environment. The default is the Azure Public Cloud
+Optional name of the cloud environment. The default is the Azure Public Cloud
('AzureCloud')

.PARAMETER AdditionalParameters
@@ -738,15 +816,84 @@ Deployment (CI/CD) build (only Azure Pipelines is currently supported).
Force creation of resources instead of being prompted.

.PARAMETER OutFile
-Save test environment settings into a test-resources.json.env file next to test-resources.json. File is protected via DPAPI. Supported only on windows.
-The environment file would be scoped to the current repository directory.
+Save test environment settings into a .env file next to the test resources template.
+The contents of the file are protected via the .NET Data Protection API (DPAPI).
+This is supported only on Windows. The environment file is scoped to the current
+service directory.
+
+The environment file will be named for the test resources template that it was
+generated for. For ARM templates, it will be test-resources.json.env. For
+Bicep templates, test-resources.bicep.env.
.EXAMPLE
Connect-AzAccount -Subscription 'REPLACE_WITH_SUBSCRIPTION_ID'
New-TestResources.ps1 keyvault

-Run this in a desktop environment to create new AAD apps and Service Principals
-that can be used to provision resources and run live tests.
+Run this in a desktop environment to create a new AAD application and Service Principal
+for running live tests against the test resources created. The principal will have ownership
+rights to the resource group and the resources that it contains, but no other resources in
+the subscription.
+
+Requires PowerShell 7 to use ConvertFrom-SecureString -AsPlainText or convert
+the SecureString to plaintext by another means.
+
+.EXAMPLE
+Connect-AzAccount -Subscription 'REPLACE_WITH_SUBSCRIPTION_ID'
+New-TestResources.ps1 `
+    -BaseName 'azsdk' `
+    -ServiceDirectory 'keyvault' `
+    -SubscriptionId 'REPLACE_WITH_SUBSCRIPTION_ID' `
+    -ResourceGroupName 'REPLACE_WITH_NAME_FOR_RESOURCE_GROUP' `
+    -Location 'eastus'
+
+Run this in a desktop environment to specify the name and location of the resource
+group that test resources are being deployed to. This will also create a new AAD
+application and Service Principal for running live tests against the test resources
+created. The principal will have ownership rights to the resource group and the
+resources that it contains, but no other resources in the subscription.
+
+Requires PowerShell 7 to use ConvertFrom-SecureString -AsPlainText or convert
+the SecureString to plaintext by another means.
+
+.EXAMPLE
+Connect-AzAccount -Subscription 'REPLACE_WITH_SUBSCRIPTION_ID'
+New-TestResources.ps1 `
+    -BaseName 'azsdk' `
+    -ServiceDirectory 'keyvault' `
+    -SubscriptionId 'REPLACE_WITH_SUBSCRIPTION_ID' `
+    -ResourceGroupName 'REPLACE_WITH_NAME_FOR_RESOURCE_GROUP' `
+    -Location 'eastus' `
+    -TestApplicationId 'REPLACE_WITH_TEST_APPLICATION_ID' `
+    -TestApplicationSecret 'REPLACE_WITH_TEST_APPLICATION_SECRET'
+
+Run this in a desktop environment to specify the name and location of the resource
+group that test resources are being deployed to. This will grant ownership rights
+to the 'TestApplicationId' for the resource group and the resources that it contains,
+without altering its existing permissions.
+
+.EXAMPLE
+New-TestResources.ps1 `
+    -BaseName 'azsdk' `
+    -ServiceDirectory 'keyvault' `
+    -SubscriptionId 'REPLACE_WITH_SUBSCRIPTION_ID' `
+    -ResourceGroupName 'REPLACE_WITH_NAME_FOR_RESOURCE_GROUP' `
+    -Location 'eastus' `
+    -ProvisionerApplicationId 'REPLACE_WITH_PROVISIONER_APPLICATION_ID' `
+    -ProvisionerApplicationSecret 'REPLACE_WITH_PROVISIONER_APPLICATION_SECRET' `
+    -TestApplicationId 'REPLACE_WITH_TEST_APPLICATION_ID' `
+    -TestApplicationOid 'REPLACE_WITH_TEST_APPLICATION_OBJECT_ID' `
+    -TestApplicationSecret 'REPLACE_WITH_TEST_APPLICATION_SECRET'
+
+Run this in a desktop environment to specify the name and location of the resource
+group that test resources are being deployed to. The script will be executed in the
+context of the 'ProvisionerApplicationId' rather than the caller.
+
+Depending on the permissions of the Provisioner Application principal, the script may
+grant ownership rights to 'TestApplicationId' for the resource group and the resources
+that it contains, or may emit a message indicating that it was unable to perform the grant.
+
+For the Provisioner Application principal to perform the grant, it will need the
+permission 'Application.ReadWrite.OwnedBy' for the Microsoft Graph API.

Requires PowerShell 7 to use ConvertFrom-SecureString -AsPlainText or convert
the SecureString to plaintext by another means.
diff --git a/eng/common/TestResources/New-TestResources.ps1.md b/eng/common/TestResources/New-TestResources.ps1.md
index b27dfba8c81a..fd96e71cfdf9 100644
--- a/eng/common/TestResources/New-TestResources.ps1.md
+++ b/eng/common/TestResources/New-TestResources.ps1.md
@@ -32,24 +32,24 @@ New-TestResources.ps1 [-BaseName ] [-ResourceGroupName ] [-Servi
```

## DESCRIPTION
-Deploys live test resouces specified in test-resources.json files to a resource
-group.
+Deploys live test resources specified in test-resources.json or test-resources.bicep
+files to a new resource group.

This script searches the directory specified in $ServiceDirectory recursively
-for files named test-resources.json.
-All found test-resources.json files will be
-deployed to the test resource group.
+for files named test-resources.json or test-resources.bicep.
+All found test-resources.json
+and test-resources.bicep files will be deployed to the test resource group.

-If no test-resources.json files are located the script exits without making
-changes to the Azure environment.
+If no test-resources.json or test-resources.bicep files are located the script
+exits without making changes to the Azure environment.

-A service principal must first be created before this script is run and passed
-to $TestApplicationId and $TestApplicationSecret.
-Test resources will grant this
-service principal access.
+A service principal may optionally be passed to $TestApplicationId and $TestApplicationSecret.
+Test resources will grant this service principal access to the created resources.
+If no service principal is specified, a new one will be created and assigned the
+'Owner' role for the resource group associated with the test resources.

-This script uses credentials already specified in Connect-AzAccount or those
-specified in $ProvisionerApplicationId and $ProvisionerApplicationSecret.
+This script runs in the context of credentials already specified in Connect-AzAccount
+or those specified in $ProvisionerApplicationId and $ProvisionerApplicationSecret.

## EXAMPLES

### EXAMPLE 1
```
Connect-AzAccount -Subscription 'REPLACE_WITH_SUBSCRIPTION_ID'
New-TestResources.ps1 keyvault
```

-Run this in a desktop environment to create new AAD apps and Service Principals
-that can be used to provision resources and run live tests.
+Run this in a desktop environment to create a new AAD application and Service Principal
+for running live tests against the test resources created.
+The principal will have ownership
+rights to the resource group and the resources that it contains, but no other resources in
+the subscription.

Requires PowerShell 7 to use ConvertFrom-SecureString -AsPlainText or convert
the SecureString to plaintext by another means.

### EXAMPLE 2
```
+Connect-AzAccount -Subscription 'REPLACE_WITH_SUBSCRIPTION_ID'
+New-TestResources.ps1 `
+    -BaseName 'azsdk' `
+    -ServiceDirectory 'keyvault' `
+    -SubscriptionId 'REPLACE_WITH_SUBSCRIPTION_ID' `
+    -ResourceGroupName 'REPLACE_WITH_NAME_FOR_RESOURCE_GROUP' `
+    -Location 'eastus'
+```
+
+Run this in a desktop environment to specify the name and location of the resource
+group that test resources are being deployed to.
+This will also create a new AAD
+application and Service Principal for running live tests against the test resources
+created.
+The principal will have ownership rights to the resource group and the
+resources that it contains, but no other resources in the subscription.
+
+Requires PowerShell 7 to use ConvertFrom-SecureString -AsPlainText or convert
+the SecureString to plaintext by another means.
+
+### EXAMPLE 3
+```
+Connect-AzAccount -Subscription 'REPLACE_WITH_SUBSCRIPTION_ID'
+New-TestResources.ps1 `
+    -BaseName 'azsdk' `
+    -ServiceDirectory 'keyvault' `
+    -SubscriptionId 'REPLACE_WITH_SUBSCRIPTION_ID' `
+    -ResourceGroupName 'REPLACE_WITH_NAME_FOR_RESOURCE_GROUP' `
+    -Location 'eastus' `
+    -TestApplicationId 'REPLACE_WITH_TEST_APPLICATION_ID' `
+    -TestApplicationSecret 'REPLACE_WITH_TEST_APPLICATION_SECRET'
+```
+
+Run this in a desktop environment to specify the name and location of the resource
+group that test resources are being deployed to.
+This will grant ownership rights
+to the 'TestApplicationId' for the resource group and the resources that it contains,
+without altering its existing permissions.
+
+### EXAMPLE 4
+```
+New-TestResources.ps1 `
+    -BaseName 'azsdk' `
+    -ServiceDirectory 'keyvault' `
+    -SubscriptionId 'REPLACE_WITH_SUBSCRIPTION_ID' `
+    -ResourceGroupName 'REPLACE_WITH_NAME_FOR_RESOURCE_GROUP' `
+    -Location 'eastus' `
+    -ProvisionerApplicationId 'REPLACE_WITH_PROVISIONER_APPLICATION_ID' `
+    -ProvisionerApplicationSecret 'REPLACE_WITH_PROVISIONER_APPLICATION_SECRET' `
+    -TestApplicationId 'REPLACE_WITH_TEST_APPLICATION_ID' `
+    -TestApplicationOid 'REPLACE_WITH_TEST_APPLICATION_OBJECT_ID' `
+    -TestApplicationSecret 'REPLACE_WITH_TEST_APPLICATION_SECRET'
+```
+
+Run this in a desktop environment to specify the name and location of the resource
+group that test resources are being deployed to.
+The script will be executed in the
+context of the 'ProvisionerApplicationId' rather than the caller.
+
+Depending on the permissions of the Provisioner Application principal, the script may
+grant ownership rights to 'TestApplicationId' for the resource group and the resources
+that it contains, or may emit a message indicating that it was unable to perform the grant.
+
+For the Provisioner Application principal to perform the grant, it will need the
+permission 'Application.ReadWrite.OwnedBy' for the Microsoft Graph API.
+
+Requires PowerShell 7 to use ConvertFrom-SecureString -AsPlainText or convert
+the SecureString to plaintext by another means.
+
+### EXAMPLE 5
+```
New-TestResources.ps1 `
    -BaseName 'Generated' `
    -ServiceDirectory '$(ServiceDirectory)' `
@@ -111,7 +185,10 @@ Accept wildcard characters: False
```

### -ResourceGroupName
Set this value to deploy directly to a Resource Group that has already been
-created.
+created or to create a new resource group with this name.
+
+If not specified, the $BaseName will be used to generate a name for the resource
+group that will be created.

```yaml
Type: String
@@ -127,8 +204,10 @@ Accept wildcard characters: False
```

### -ServiceDirectory
A directory under 'sdk' in the repository root - optionally with subdirectories
-specified - in which to discover ARM templates named 'test-resources.json'.
-This can also be an absolute path or specify parent directories.
+specified - in which to discover ARM templates named 'test-resources.json' and
+Bicep templates named 'test-resources.bicep'.
+This can also be an absolute path
+or specify parent directories.

```yaml
Type: String
@@ -143,10 +222,19 @@ Accept wildcard characters: False
```

### -TestApplicationId
-The AAD Application ID to authenticate the test runner against deployed
-resources.
+Optional Azure Active Directory Application ID to authenticate the test runner
+against deployed resources.
Passed to the ARM template as 'testApplicationId'.

+If not specified, a new AAD Application will be created and assigned the 'Owner'
+role for the resource group associated with the test resources.
+No permissions
+will be granted to the subscription or other resources.
+
+For those specifying a Provisioner Application principal as 'ProvisionerApplicationId',
+it will need the permission 'Application.ReadWrite.OwnedBy' for the Microsoft Graph API
+in order to create the Test Application principal.
+
This application is used by the test runner to execute tests against the live
test resources.
@@ -184,7 +272,7 @@ Accept wildcard characters: False
```

### -TestApplicationOid
-Service Principal Object ID of the AAD Test application.
+Service Principal Object ID of the AAD Test Application.
This is used to assign
permissions to the AAD application so it can access tested features on the live
test resources (e.g.
Role Assignments on resources).
It is passed as to the ARM template as 'testApplicationOid'

+If not specified, an attempt will be made to query it from the Azure Active Directory
+tenant.
+For those specifying a service principal as 'ProvisionerApplicationId',
+it will need the permission 'Application.Read.All' for the Microsoft Graph API
+in order to query AAD.
+
For more information on the relationship between AAD Applications and Service
Principals see: https://docs.microsoft.com/azure/active-directory/develop/app-objects-and-service-principals

@@ -211,8 +305,8 @@ Accept wildcard characters: False
The tenant ID of a service principal when a provisioner is specified.
The same
Tenant ID is used for Test Application and Provisioner Application.
-This value
-is passed to the ARM template as 'tenantId'.
+
+This value is passed to the ARM template as 'tenantId'.

```yaml
Type: String
@@ -250,10 +344,23 @@ Accept wildcard characters: False
```

### -ProvisionerApplicationId
-The AAD Application ID used to provision test resources when a provisioner is
-specified.
+Optional Application ID of the Azure Active Directory service principal to use for
+provisioning the test resources.
+If not specified, New-TestResources.ps1 uses the
+context of the caller to provision.

-If none is specified New-TestResources.ps1 uses the TestApplicationId.
+If specified, the Provisioner Application principal would benefit from the following
+permissions to the Microsoft Graph API:
+
+  - 'Application.Read.All' in order to query AAD to obtain the 'TestApplicationOid'
+
+  - 'Application.ReadWrite.OwnedBy' in order to create the Test Application principal
+    or grant an existing principal ownership of the resource group associated with
+    the test resources.
+
+If the provisioner does not have these permissions, it can still be used with
+New-TestResources.ps1 by specifying an existing Test Application principal, including
+its Object ID, and managing permissions to the resource group manually.

This value is not passed to the ARM template.

@@ -273,8 +380,6 @@ Accept wildcard characters: False
A service principal secret (password) used to provision test resources when a
provisioner is specified.

-If none is specified New-TestResources.ps1 uses the TestApplicationSecret.
-
This value is not passed to the ARM template.

```yaml
@@ -335,7 +440,7 @@ Accept wildcard characters: False
```

### -Environment
-Name of the cloud environment.
+Optional name of the cloud environment.
The default is the Azure Public Cloud ('AzureCloud') @@ -428,10 +533,17 @@ Accept wildcard characters: False ``` ### -OutFile -Save test environment settings into a test-resources.json.env file next to test-resources.json. -File is protected via DPAPI. -Supported only on windows. -The environment file would be scoped to the current repository directory. +Save test environment settings into a .env file next to test resources template. +The contents of the file are protected via the .NET Data Protection API (DPAPI). +This is supported only on Windows. +The environment file is scoped to the current +service directory. + +The environment file will be named for the test resources template that it was +generated for. +For ARM templates, it will be test-resources.json.env. +For +Bicep templates, test-resources.bicep.env. ```yaml Type: SwitchParameter diff --git a/eng/common/TestResources/README.md b/eng/common/TestResources/README.md index 59779f9f9898..8463501c0f9a 100644 --- a/eng/common/TestResources/README.md +++ b/eng/common/TestResources/README.md @@ -1,8 +1,8 @@ # Live Test Resource Management Running and recording live tests often requires first creating some resources -in Azure. Service directories that include a test-resources.json file require -running [New-TestResources.ps1][] to create these resources and output +in Azure. Service directories that include a `test-resources.json` or `test-resources.bicep` +file require running [New-TestResources.ps1][] to create these resources and output environment variables you must set. The following scripts can be used both in on your desktop for developer @@ -19,9 +19,10 @@ scenarios as well as on hosted agents for continuous integration testing. ## On the Desktop To set up your Azure account to run live tests, you'll need to log into Azure, -and set up your resources defined in test-resources.json as shown in the following -example using Azure Key Vault. The script will create a service principal automatically, -or you may create a service principal you can save and reuse subsequently. +and create the resources defined in your `test-resources.json` or `test-resources.bicep` +template as shown in the following example using Azure Key Vault. The script will create +a service principal automatically, or you may create a service principal that can be reused +subsequently. Note that `-Subscription` is an optional parameter but recommended if your account is a member of multiple subscriptions. If you didn't specify it when logging in, @@ -33,8 +34,9 @@ Connect-AzAccount -Subscription 'YOUR SUBSCRIPTION ID' eng\common\TestResources\New-TestResources.ps1 keyvault ``` -The `OutFile` switch will be set by default if you are running this for a .NET project on Windows. This will save test environment settings -into a test-resources.json.env file next to test-resources.json. The file is protected via DPAPI. +The `OutFile` switch will be set by default if you are running this for a .NET project on Windows. +This will save test environment settings into a `test-resources.json.env` file next to `test-resources.json` +or a `test-resources.bicep.env` file next to `test-resources.bicep`. The file is protected via DPAPI. The environment file would be scoped to the current repository directory and avoids the need to set environment variables or restart your IDE to recognize them. 
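If you have already created a service principal that you want to reuse, you can pass it to the script rather than
having a new one created. For example (the placeholder values below are illustrative):

```powershell
Connect-AzAccount -Subscription 'YOUR SUBSCRIPTION ID'
eng\common\TestResources\New-TestResources.ps1 `
    -ServiceDirectory 'keyvault' `
    -TestApplicationId 'REPLACE_WITH_TEST_APPLICATION_ID' `
    -TestApplicationSecret 'REPLACE_WITH_TEST_APPLICATION_SECRET'
```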
diff --git a/eng/common/pipelines/templates/steps/cache-ps-modules.yml b/eng/common/pipelines/templates/steps/cache-ps-modules.yml index 563dba40294f..ac136803164f 100644 --- a/eng/common/pipelines/templates/steps/cache-ps-modules.yml +++ b/eng/common/pipelines/templates/steps/cache-ps-modules.yml @@ -3,8 +3,12 @@ steps: . ./eng/common/scripts/Helpers/PSModule-Helpers.ps1 Write-Host "##vso[task.setvariable variable=CachedPSModulePath]$global:CurrentUserModulePath" displayName: Set PS Modules Cache Directory + # Containers should bake modules into the image to save on pipeline time + condition: and(succeeded(), eq(variables['Container'], '')) - task: Cache@2 inputs: key: 'PSModulePath | $(CacheSalt) | $(Agent.OS) | $(Build.SourcesDirectory)/eng/common/scripts/Import-AzModules.ps1' path: $(CachedPSModulePath) - displayName: Cache PS Modules \ No newline at end of file + displayName: Cache PS Modules + # Containers should bake modules into the image to save on pipeline time + condition: and(succeeded(), eq(variables['Container'], '')) diff --git a/eng/common/pipelines/templates/steps/detect-api-changes.yml b/eng/common/pipelines/templates/steps/detect-api-changes.yml new file mode 100644 index 000000000000..de4dd5675417 --- /dev/null +++ b/eng/common/pipelines/templates/steps/detect-api-changes.yml @@ -0,0 +1,19 @@ +parameters: + ArtifactPath: $(Build.ArtifactStagingDirectory) + Artifacts: [] + +steps: + - task: Powershell@2 + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/scripts/Detect-Api-Changes.ps1 + arguments: > + -ArtifactList ('${{ convertToJson(parameters.Artifacts) }}' | ConvertFrom-Json | Select-Object Name) + -ArtifactPath ${{parameters.ArtifactPath}} + -CommitSha '$(Build.SourceVersion)' + -BuildId $(Build.BuildId) + -PullRequestNumber $(System.PullRequest.PullRequestNumber) + -RepoFullName $(Build.Repository.Name) + pwsh: true + workingDirectory: $(Pipeline.Workspace) + displayName: Detect API changes + condition: and(succeededOrFailed(), eq(variables['Build.Reason'],'PullRequest')) diff --git a/eng/common/pipelines/templates/steps/verify-changelog.yml b/eng/common/pipelines/templates/steps/verify-changelog.yml index 8173c8ec599b..887ad1a97d90 100644 --- a/eng/common/pipelines/templates/steps/verify-changelog.yml +++ b/eng/common/pipelines/templates/steps/verify-changelog.yml @@ -4,7 +4,7 @@ parameters: default: 'not-specified' - name: ServiceName type: string - default: 'not-specified' + default: '' - name: ServiceDirectory type: string default: '' @@ -17,10 +17,10 @@ steps: inputs: filePath: $(Build.SourcesDirectory)/eng/common/scripts/Verify-ChangeLog.ps1 arguments: > - -PackageName ${{ parameters.PackageName }} - -ServiceDirectory ${{ coalesce(parameters.ServiceDirectory, parameters.ServiceName) }} + -PackageName '${{ parameters.PackageName }}' + -ServiceDirectory '${{ coalesce(parameters.ServiceDirectory, parameters.ServiceName) }}' -ForRelease $${{ parameters.ForRelease }} pwsh: true workingDirectory: $(Pipeline.Workspace) displayName: Verify ChangeLogEntry for ${{ parameters.PackageName }} - continueOnError: false \ No newline at end of file + continueOnError: false diff --git a/eng/common/scripts/ChangeLog-Operations.ps1 b/eng/common/scripts/ChangeLog-Operations.ps1 index 25423b0ea8ad..02f498130246 100644 --- a/eng/common/scripts/ChangeLog-Operations.ps1 +++ b/eng/common/scripts/ChangeLog-Operations.ps1 @@ -3,6 +3,7 @@ . 
"${PSScriptRoot}\SemVer.ps1" $RELEASE_TITLE_REGEX = "(?^\#+\s+(?$([AzureEngSemanticVersion]::SEMVER_REGEX))(\s+(?\(.+\))))" +$SECTION_HEADER_REGEX_SUFFIX = "##\s(?.*)" $CHANGELOG_UNRELEASED_STATUS = "(Unreleased)" $CHANGELOG_DATE_FORMAT = "yyyy-MM-dd" $RecommendedSectionHeaders = @("Features Added", "Breaking Changes", "Bugs Fixed", "Other Changes") @@ -41,6 +42,17 @@ function Get-ChangeLogEntriesFromContent { $changelogEntry = $null $sectionName = $null $changeLogEntries = [Ordered]@{} + $initialAtxHeader= "#" + + if ($changeLogContent[0] -match "(?^#+)\s.*") + { + $initialAtxHeader = $matches["HeaderLevel"] + } + + $sectionHeaderRegex = "^${initialAtxHeader}${SECTION_HEADER_REGEX_SUFFIX}" + $changeLogEntries | Add-Member -NotePropertyName "InitialAtxHeader" -NotePropertyValue $initialAtxHeader + $releaseTitleAtxHeader = $initialAtxHeader + "#" + try { # walk the document, finding where the version specifiers are and creating lists foreach ($line in $changeLogContent) { @@ -48,7 +60,7 @@ function Get-ChangeLogEntriesFromContent { $changeLogEntry = [pscustomobject]@{ ReleaseVersion = $matches["version"] ReleaseStatus = $matches["releaseStatus"] - ReleaseTitle = "## {0} {1}" -f $matches["version"], $matches["releaseStatus"] + ReleaseTitle = "$releaseTitleAtxHeader {0} {1}" -f $matches["version"], $matches["releaseStatus"] ReleaseContent = @() Sections = @{} } @@ -56,7 +68,7 @@ function Get-ChangeLogEntriesFromContent { } else { if ($changeLogEntry) { - if ($line.Trim() -match "^###\s(?.*)") + if ($line.Trim() -match $sectionHeaderRegex) { $sectionName = $matches["sectionName"].Trim() $changeLogEntry.Sections[$sectionName] = @() @@ -125,16 +137,24 @@ function Confirm-ChangeLogEntry { [String]$ChangeLogLocation, [Parameter(Mandatory = $true)] [String]$VersionString, - [boolean]$ForRelease = $false + [boolean]$ForRelease = $false, + [Switch]$SantizeEntry ) - $changeLogEntry = Get-ChangeLogEntry -ChangeLogLocation $ChangeLogLocation -VersionString $VersionString + $changeLogEntries = Get-ChangeLogEntries -ChangeLogLocation $ChangeLogLocation + $changeLogEntry = $changeLogEntries[$VersionString] if (!$changeLogEntry) { LogError "ChangeLog[${ChangeLogLocation}] does not have an entry for version ${VersionString}." return $false } + if ($SantizeEntry) + { + Remove-EmptySections -ChangeLogEntry $changeLogEntry -InitialAtxHeader $changeLogEntries.InitialAtxHeader + Set-ChangeLogContent -ChangeLogLocation $ChangeLogLocation -ChangeLogEntries $changeLogEntries + } + Write-Host "Found the following change log entry for version '${VersionString}' in [${ChangeLogLocation}]." 
Write-Host "-----" Write-Host (ChangeLogEntryAsString $changeLogEntry) @@ -209,6 +229,7 @@ function New-ChangeLogEntry { [ValidateNotNullOrEmpty()] [String]$Version, [String]$Status=$CHANGELOG_UNRELEASED_STATUS, + [String]$InitialAtxHeader="#", [String[]]$Content ) @@ -238,17 +259,20 @@ function New-ChangeLogEntry { $Content = @() $Content += "" + $sectionsAtxHeader = $InitialAtxHeader + "##" foreach ($recommendedHeader in $RecommendedSectionHeaders) { - $Content += "### $recommendedHeader" + $Content += "$sectionsAtxHeader $recommendedHeader" $Content += "" } } + $releaseTitleAtxHeader = $initialAtxHeader + "#" + $newChangeLogEntry = [pscustomobject]@{ ReleaseVersion = $Version ReleaseStatus = $Status - ReleaseTitle = "## $Version $Status" + ReleaseTitle = "$releaseTitleAtxHeader $Version $Status" ReleaseContent = $Content } @@ -264,7 +288,7 @@ function Set-ChangeLogContent { ) $changeLogContent = @() - $changeLogContent += "# Release History" + $changeLogContent += "$($ChangeLogEntries.InitialAtxHeader) Release History" $changeLogContent += "" try @@ -289,3 +313,42 @@ function Set-ChangeLogContent { Set-Content -Path $ChangeLogLocation -Value $changeLogContent } + +function Remove-EmptySections { + param ( + [Parameter(Mandatory = $true)] + $ChangeLogEntry, + $InitialAtxHeader = "#" + ) + + $sectionHeaderRegex = "^${InitialAtxHeader}${SECTION_HEADER_REGEX_SUFFIX}" + $releaseContent = $ChangeLogEntry.ReleaseContent + + if ($releaseContent.Count -gt 0) + { + $parsedSections = $ChangeLogEntry.Sections + $sanitizedReleaseContent = New-Object System.Collections.ArrayList(,$releaseContent) + + foreach ($key in @($parsedSections.Keys)) + { + if ([System.String]::IsNullOrWhiteSpace($parsedSections[$key])) + { + for ($i = 0; $i -lt $sanitizedReleaseContent.Count; $i++) + { + $line = $sanitizedReleaseContent[$i] + if ($line -match $sectionHeaderRegex -and $matches["sectionName"].Trim() -eq $key) + { + $sanitizedReleaseContent.RemoveAt($i) + while($i -lt $sanitizedReleaseContent.Count -and [System.String]::IsNullOrWhiteSpace($sanitizedReleaseContent[$i])) + { + $sanitizedReleaseContent.RemoveAt($i) + } + $ChangeLogEntry.Sections.Remove($key) + break + } + } + } + } + $ChangeLogEntry.ReleaseContent = $sanitizedReleaseContent.ToArray() + } +} \ No newline at end of file diff --git a/eng/common/scripts/Detect-Api-Changes.ps1 b/eng/common/scripts/Detect-Api-Changes.ps1 new file mode 100644 index 000000000000..375210171794 --- /dev/null +++ b/eng/common/scripts/Detect-Api-Changes.ps1 @@ -0,0 +1,119 @@ +[CmdletBinding()] +Param ( + [Parameter(Mandatory=$True)] + [string] $ArtifactPath, + [Parameter(Mandatory=$True)] + [string] $PullRequestNumber, + [Parameter(Mandatory=$True)] + [string] $BuildId, + [Parameter(Mandatory=$True)] + [string] $CommitSha, + [Parameter(Mandatory=$True)] + [array] $ArtifactList, + [string] $RepoFullName = "", + [string] $ArtifactName = "packages", + [string] $APIViewUri = "https://apiview.dev/PullRequest/DetectApiChanges" +) + +# Submit API review request and return status whether current revision is approved or pending or failed to create review +function Submit-Request($filePath) +{ + $repoName = $RepoFullName + if (!$repoName) { + $repoName = "azure/azure-sdk-for-$LanguageShort" + } + $query = [System.Web.HttpUtility]::ParseQueryString('') + $query.Add('artifactName', $ArtifactName) + $query.Add('buildId', $BuildId) + $query.Add('filePath', $filePath) + $query.Add('commitSha', $CommitSha) + $query.Add('repoName', $repoName) + $query.Add('pullRequestNumber', 
$PullRequestNumber) + $uri = [System.UriBuilder]$APIViewUri + $uri.query = $query.toString() + Write-Host "Request URI: $($uri.Uri.OriginalString)" + try + { + $Response = Invoke-WebRequest -Method 'GET' -Uri $uri.Uri -MaximumRetryCount 3 + $StatusCode = $Response.StatusCode + } + catch + { + Write-Host "Error $StatusCode - Exception details: $($_.Exception.Response)" + $StatusCode = $_.Exception.Response.StatusCode + } + + return $StatusCode +} + +function Should-Process-Package($pkgPath, $packageName) +{ + $pkg = Split-Path -Leaf $pkgPath + $configFileDir = Join-Path -Path $ArtifactPath "PackageInfo" + $pkgPropPath = Join-Path -Path $configFileDir "$packageName.json" + if (!(Test-Path $pkgPropPath)) + { + Write-Host " Package property file path $($pkgPropPath) is invalid." + return $False + } + # Get package info from json file created before updating version to daily dev + $pkgInfo = Get-Content $pkgPropPath | ConvertFrom-Json + Write-Host "SDK Type: $($pkgInfo.SdkType)" + return ($pkgInfo.SdkType -eq "client" -and $pkgInfo.IsNewSdk) +} + +function Log-Input-Params() +{ + Write-Host "Artifact Path: $($ArtifactPath)" + Write-Host "Artifact Name: $($ArtifactName)" + Write-Host "PullRequest Number: $($PullRequestNumber)" + Write-Host "BuildId: $($BuildId)" + Write-Host "Language: $($Language)" + Write-Host "Commit SHA: $($CommitSha)" + Write-Host "Repo Name: $($RepoFullName)" +} + +. (Join-Path $PSScriptRoot common.ps1) +Log-Input-Params + +if (!($FindArtifactForApiReviewFn -and (Test-Path "Function:$FindArtifactForApiReviewFn"))) +{ + Write-Host "The function for 'FindArtifactForApiReviewFn' was not found.` + Make sure it is present in eng/scripts/Language-Settings.ps1 and referenced in eng/common/scripts/common.ps1.` + See https://github.com/Azure/azure-sdk-tools/blob/main/doc/common/common_engsys.md#code-structure" + exit 1 +} + +$responses = @{} +foreach ($artifact in $ArtifactList) +{ + Write-Host "Processing $($artifact.name)" + $packages = &$FindArtifactForApiReviewFn $ArtifactPath $artifact.name + if ($packages) + { + $pkgPath = $packages.Values[0] + if (Should-Process-Package -pkgPath $pkgPath -packageName $artifact.name) + { + $filePath = $pkgPath.Replace($ArtifactPath , "").Replace("\", "/") + $respCode = Submit-Request -filePath $filePath + if ($respCode -ne '200') + { + $responses[$artifact.name] = $respCode + } + } + } + else + { + Write-Host "No package is found in artifact path to find API changes for $($artifact.name)" + } +} + +if ($responses) +{ + # Will update this with a link to wiki on how to resolve + Write-Warning "API change detection failed for following packages. Please check above for package level error details." + foreach($pkg in $responses.keys) + { + Write-Host "$pkg failed with $($responses[$pkg]) code" + } +} diff --git a/eng/common/scripts/Prepare-Release.ps1 b/eng/common/scripts/Prepare-Release.ps1 index cee038dd18a0..2005347822fd 100644 --- a/eng/common/scripts/Prepare-Release.ps1 +++ b/eng/common/scripts/Prepare-Release.ps1 @@ -184,11 +184,11 @@ else exit 1 } -$changelogIsValid = Confirm-ChangeLogEntry -ChangeLogLocation $packageProperties.ChangeLogPath -VersionString $newVersion -ForRelease $true +$changelogIsValid = Confirm-ChangeLogEntry -ChangeLogLocation $packageProperties.ChangeLogPath -VersionString $newVersion -ForRelease $true -SantizeEntry if (!$changelogIsValid) { - Write-Host "The changelog [$($packageProperties.ChangeLogPath)] is not valid for release. Please make sure it is valid before queuing release build." 
-ForegroundColor Red + Write-Warning "The changelog [$($packageProperties.ChangeLogPath)] is not valid for release. Please make sure it is valid before queuing release build." } git diff -s --exit-code $packageProperties.DirectoryPath diff --git a/eng/common/scripts/SemVer.ps1 b/eng/common/scripts/SemVer.ps1 index b3ca5353274d..6459f252a67c 100644 --- a/eng/common/scripts/SemVer.ps1 +++ b/eng/common/scripts/SemVer.ps1 @@ -136,7 +136,7 @@ class AzureEngSemanticVersion : IComparable { { $versionString = "{0}.{1}.{2}" -F $this.Major, $this.Minor, $this.Patch - if ($this.IsPrerelease) + if ($this.IsPrerelease -and $this.PrereleaseLabel -ne "zzz") { $versionString += $this.PrereleaseLabelSeparator + $this.PrereleaseLabel + ` $this.PrereleaseNumberSeparator + $this.PrereleaseNumber diff --git a/eng/common/scripts/Update-ChangeLog.ps1 b/eng/common/scripts/Update-ChangeLog.ps1 index 1524bd7318c1..6e09f369cec1 100644 --- a/eng/common/scripts/Update-ChangeLog.ps1 +++ b/eng/common/scripts/Update-ChangeLog.ps1 @@ -106,47 +106,7 @@ if ($LatestsSorted[0] -ne $Version) { if ($ReplaceLatestEntryTitle) { - # Remove empty sections from content - $sanitizedContent = @() - $sectionContent = @() - $sectionContentCount = 0 - $latesVersionContent = $ChangeLogEntries[$LatestVersion].ReleaseContent - - foreach ($line in $latesVersionContent) - { - if ($line.StartsWith("### ") -or $sectionContentCount -gt 0) - { - if ($line.StartsWith("#") -and $sectionContentCount -gt 1) - { - $sanitizedContent += $sectionContent - $sectionContent = @() - $sectionContentCount = 0 - } - - if ($line.StartsWith("#") -and $sectionContentCount -eq 1) - { - $sectionContent = @() - $sectionContentCount = 0 - } - - $sectionContent += $line - if (-not [System.String]::IsNullOrWhiteSpace($line)) - { - $sectionContentCount++ - } - } - elseif ($sectionContent.Count -eq 0) - { - $sanitizedContent += $line - } - } - - if ($sectionContentCount -gt 1) - { - $sanitizedContent += $sectionContent - } - - $newChangeLogEntry = New-ChangeLogEntry -Version $Version -Status $ReleaseStatus -Content $sanitizedContent + $newChangeLogEntry = New-ChangeLogEntry -Version $Version -Status $ReleaseStatus -InitialAtxHeader $ChangeLogEntries.InitialAtxHeader -Content $ChangeLogEntries[$LatestVersion].ReleaseContent LogDebug "Resetting latest entry title to [$($newChangeLogEntry.ReleaseTitle)]" $ChangeLogEntries.Remove($LatestVersion) if ($newChangeLogEntry) { @@ -161,12 +121,12 @@ elseif ($ChangeLogEntries.Contains($Version)) { LogDebug "Updating ReleaseStatus for Version [$Version] to [$($ReleaseStatus)]" $ChangeLogEntries[$Version].ReleaseStatus = $ReleaseStatus - $ChangeLogEntries[$Version].ReleaseTitle = "## $Version $ReleaseStatus" + $ChangeLogEntries[$Version].ReleaseTitle = "$($ChangeLogEntries.InitialAtxHeader)# $Version $ReleaseStatus" } else { LogDebug "Adding new ChangeLog entry for Version [$Version]" - $newChangeLogEntry = New-ChangeLogEntry -Version $Version -Status $ReleaseStatus + $newChangeLogEntry = New-ChangeLogEntry -Version $Version -Status $ReleaseStatus -InitialAtxHeader $ChangeLogEntries.InitialAtxHeader if ($newChangeLogEntry) { $ChangeLogEntries.Insert(0, $Version, $newChangeLogEntry) } diff --git a/eng/common/scripts/check-spelling-in-changed-files.ps1 b/eng/common/scripts/check-spelling-in-changed-files.ps1 index b0a9611620d5..ebeddc605032 100644 --- a/eng/common/scripts/check-spelling-in-changed-files.ps1 +++ b/eng/common/scripts/check-spelling-in-changed-files.ps1 @@ -1,3 +1,54 @@ +<# +.SYNOPSIS +Uses cspell (from NPM) to check 
spelling of recently changed files + +.DESCRIPTION +This script checks files that have changed relative to a base branch (default +branch) for spelling errors. Dictionaries and spelling configurations reside +in a configurable `cspell.json` location. + +This script uses `npx` and assumes that NodeJS (and by extension `npm` and +`npx`) are installed on the machine. If it does not detect `npx` it will warn +the user and exit with an error. + +The entire file is scanned, not just changed sections. Spelling errors in parts +of the file not touched will still be shown. + +This script copies the config file supplied in CspellConfigPath to a temporary +location, mutates the config file to include only the files that have changed, +and then uses the mutated config file to call cspell. In the case of success +the temporary file is deleted. In the case of failure the temporary file, whose +location was logged to the console, remains on disk. + +.PARAMETER TargetBranch +Git ref to compare changes. This is usually the "base" (GitHub) or "target" +(DevOps) branch for which a pull request would be opened. + +.PARAMETER SourceBranch +Git ref to use instead of changes in current repo state. Use `HEAD` here to +check spelling of files that have been committed and exclude any new files or +modified files that are not committed. This is most useful in CI scenarios where +builds may have modified the state of the repo. Leaving this parameter blank +includes files for whom changes have not been committed. + +.PARAMETER CspellConfigPath +Optional location to use for cspell.json path. Default value is +`./.vscode/cspell.json` + +.PARAMETER ExitWithError +Exit with error code 1 if spelling errors are detected. + +.PARAMETER Test +Run test functions against the script logic + +.EXAMPLE +./eng/common/scripts/check-spelling-in-changed-files.ps1 -TargetBranch 'target_branch_name' + +This will run spell check with changes in the current branch with respect to +`target_branch_name` + +#> + [CmdletBinding()] Param ( [Parameter()] @@ -10,18 +61,205 @@ Param ( [string] $CspellConfigPath = "./.vscode/cspell.json", [Parameter()] - [switch] $ExitWithError + [switch] $ExitWithError, + + [Parameter()] + [switch] $Test ) +Set-StrictMode -Version 3.0 + +function TestSpellChecker() { + Test-Exit0WhenAllFilesExcluded + ResetTest + Test-Exit1WhenIncludedFileHasSpellingError + ResetTest + Test-Exit0WhenIncludedFileHasNoSpellingError + ResetTest + Test-Exit1WhenChangedFileAlreadyHasSpellingError + ResetTest + Test-Exit0WhenUnalteredFileHasSpellingError + ResetTest + Test-Exit0WhenSpellingErrorsAndNoExitWithError +} + +function Test-Exit0WhenAllFilesExcluded() { + # Arrange + "sepleing errrrrorrrrr" > ./excluded/excluded-file.txt + git add -A + git commit -m "One change" + + # Act + &"$PSScriptRoot/check-spelling-in-changed-files.ps1" ` + -TargetBranch HEAD~1 ` + -ExitWithError + + # Assert + if ($LASTEXITCODE -ne 0) { + throw "`$LASTEXITCODE != 0" + } +} + +function Test-Exit1WhenIncludedFileHasSpellingError() { + # Arrange + "sepleing errrrrorrrrr" > ./included/included-file.txt + git add -A + git commit -m "One change" + + # Act + &"$PSScriptRoot/check-spelling-in-changed-files.ps1" ` + -TargetBranch HEAD~1 ` + -ExitWithError + + # Assert + if ($LASTEXITCODE -ne 1) { + throw "`$LASTEXITCODE != 1" + } +} + +function Test-Exit0WhenIncludedFileHasNoSpellingError() { + # Arrange + "correct spelling" > ./included/included-file.txt + git add -A + git commit -m "One change" + + # Act + &"$PSScriptRoot/check-spelling-in-changed-files.ps1" 
` + -TargetBranch HEAD~1 ` + -ExitWithError + + # Assert + if ($LASTEXITCODE -ne 0) { + throw "`$LASTEXITCODE != 0" + } +} + +function Test-Exit1WhenChangedFileAlreadyHasSpellingError() { + # Arrange + "sepleing errrrrorrrrr" > ./included/included-file.txt + git add -A + git commit -m "First change" + + "A statement without spelling errors" >> ./included/included-file.txt + git add -A + git commit -m "Second change" + + # Act + &"$PSScriptRoot/check-spelling-in-changed-files.ps1" ` + -TargetBranch HEAD~1 ` + -ExitWithError + + # Assert + if ($LASTEXITCODE -ne 1) { + throw "`$LASTEXITCODE != 1" + } +} + +function Test-Exit0WhenUnalteredFileHasSpellingError() { + # Arrange + "sepleing errrrrorrrrr" > ./included/included-file-1.txt + git add -A + git commit -m "One change" + + "A statement without spelling errors" > ./included/included-file-2.txt + git add -A + git commit -m "Second change" + + # Act + &"$PSScriptRoot/check-spelling-in-changed-files.ps1" ` + -TargetBranch HEAD~1 ` + -ExitWithError + + # Assert + if ($LASTEXITCODE -ne 0) { + throw "`$LASTEXITCODE != 0" + } +} + +function Test-Exit0WhenSpellingErrorsAndNoExitWithError() { + # Arrange + "sepleing errrrrorrrrr" > ./included/included-file-1.txt + git add -A + git commit -m "One change" + + # Act + &"$PSScriptRoot/check-spelling-in-changed-files.ps1" ` + -TargetBranch HEAD~1 + + # Assert + if ($LASTEXITCODE -ne 0) { + throw "`$LASTEXITCODE != 0" + } +} + +function SetupTest($workingDirectory) { + Write-Host "Create test temp dir: $workingDirectory" + New-Item -ItemType Directory -Force -Path $workingDirectory | Out-Null + + Push-Location $workingDirectory | Out-Null + git init + + New-Item -ItemType Directory -Force -Path "./excluded" + New-Item -ItemType Directory -Force -Path "./included" + New-Item -ItemType Directory -Force -Path "./.vscode" + + "Placeholder" > "./excluded/placeholder.txt" + "Placeholder" > "./included/placeholder.txt" + + $configJsonContent = @" +{ + "version": "0.1", + "language": "en", + "ignorePaths": [ + ".vscode/cspell.json", + "excluded/**" + ] +} +"@ + $configJsonContent > "./.vscode/cspell.json" + + git add -A + git commit -m "Init" +} + +function ResetTest() { + # Empty out the working tree + git checkout . + git clean -xdf + + $revCount = git rev-list --count HEAD + if ($revCount -gt 1) { + # Reset N-1 changes so there is only the initial commit + $revisionsToReset = $revCount - 1 + git reset --hard HEAD~$revisionsToReset + } +} + +function TeardownTest($workingDirectory) { + Pop-Location | Out-Null + Write-Host "Remove test temp dir: $workingDirectory" + Remove-Item -Path $workingDirectory -Recurse -Force | Out-Null +} + +if ($Test) { + $workingDirectory = Join-Path ([System.IO.Path]::GetTempPath()) ([System.IO.Path]::GetRandomFileName()) + + SetupTest $workingDirectory + TestSpellChecker + TeardownTest $workingDirectory + Write-Host "Test complete" + exit 0 +} + $ErrorActionPreference = "Continue" -. $PSScriptRoot/logging.ps1 +. $PSScriptRoot/common.ps1 -if ((Get-Command git | Measure-Object).Count -eq 0) { +if ((Get-Command git | Measure-Object).Count -eq 0) { LogError "Could not locate git. Install git https://git-scm.com/downloads" exit 1 } -if ((Get-Command npx | Measure-Object).Count -eq 0) { +if ((Get-Command npx | Measure-Object).Count -eq 0) { LogError "Could not locate npx. 
Install NodeJS (includes npx and npx) https://nodejs.org/en/download/" exit 1 } @@ -31,7 +269,7 @@ if (!(Test-Path $CspellConfigPath)) { exit 1 } -# Lists names of files that were in some way changed between the +# Lists names of files that were in some way changed between the # current $SourceBranch and $TargetBranch. Excludes files that were deleted to # prevent errors in Resolve-Path Write-Host "git diff --diff-filter=d --name-only $TargetBranch $SourceBranch" @@ -51,11 +289,12 @@ foreach ($file in $changedFiles) { $changedFilePaths += $file.Path } -# Using GetTempPath because it works on linux and windows. Setting .json -# extension because cspell requires the file have a .json or .jsonc extension -$cspellConfigTemporaryPath = Join-Path ` - ([System.IO.Path]::GetTempPath()) ` - "$([System.IO.Path]::GetRandomFileName()).json" +# The "files" list must always contain a file which exists, is not empty, and is +# not excluded in ignorePaths. In this case it will be a file with the contents +# "1" (no spelling errors will be detected) +$notExcludedFile = (New-TemporaryFile).ToString() +"1" >> $notExcludedFile +$changedFilePaths += $notExcludedFile $cspellConfigContent = Get-Content $CspellConfigPath -Raw $cspellConfig = ConvertFrom-Json $cspellConfigContent @@ -74,14 +313,17 @@ Add-Member ` # Set the temporary config file with the mutated configuration. The temporary # location is used to configure the command and the original file remains # unchanged. -Write-Host "Setting config in: $cspellConfigTemporaryPath" +Write-Host "Setting config in: $CspellConfigPath" Set-Content ` - -Path $cspellConfigTemporaryPath ` + -Path $CspellConfigPath ` -Value (ConvertTo-Json $cspellConfig -Depth 100) # Use the mutated configuration file when calling cspell -Write-Host "npx cspell lint --config $cspellConfigTemporaryPath" -$spellingErrors = npx cspell lint --config $cspellConfigTemporaryPath +Write-Host "npx cspell lint --config $CspellConfigPath --no-must-find-files " +$spellingErrors = npx cspell lint --config $CspellConfigPath --no-must-find-files + +Write-Host "cspell run complete, restoring original configuration." +Set-Content -Path $CspellConfigPath -Value $cspellConfigContent -NoNewLine if ($spellingErrors) { $errorLoggingFunction = Get-Item 'Function:LogWarning' @@ -89,7 +331,7 @@ if ($spellingErrors) { $errorLoggingFunction = Get-Item 'Function:LogError' } - foreach ($spellingError in $spellingErrors) { + foreach ($spellingError in $spellingErrors) { &$errorLoggingFunction $spellingError } &$errorLoggingFunction "Spelling errors detected. To correct false positives or learn about spell checking see: https://aka.ms/azsdk/engsys/spellcheck" @@ -98,56 +340,8 @@ if ($spellingErrors) { exit 1 } } else { - Write-Host "No spelling errors detected. Removing temporary config file." - Remove-Item -Path $cspellConfigTemporaryPath -Force + Write-Host "No spelling errors detected. Removing temporary file." + Remove-Item -Path $notExcludedFile -Force | Out-Null } exit 0 - -<# -.SYNOPSIS -Uses cspell (from NPM) to check spelling of recently changed files - -.DESCRIPTION -This script checks files that have changed relative to a base branch (default -branch) for spelling errors. Dictionaries and spelling configurations reside -in a configurable `cspell.json` location. - -This script uses `npx` and assumes that NodeJS (and by extension `npm` and -`npx`) are installed on the machine. If it does not detect `npx` it will warn -the user and exit with an error. - -The entire file is scanned, not just changed sections. 
Spelling errors in parts -of the file not touched will still be shown. - -This script copies the config file supplied in CspellConfigPath to a temporary -location, mutates the config file to include only the files that have changed, -and then uses the mutated config file to call cspell. In the case of success -the temporary file is deleted. In the case of failure the temporary file, whose -location was logged to the console, remains on disk. - -.PARAMETER TargetBranch -Git ref to compare changes. This is usually the "base" (GitHub) or "target" -(DevOps) branch for which a pull request would be opened. - -.PARAMETER SourceBranch -Git ref to use instead of changes in current repo state. Use `HEAD` here to -check spelling of files that have been committed and exclude any new files or -modified files that are not committed. This is most useful in CI scenarios where -builds may have modified the state of the repo. Leaving this parameter blank -includes files for whom changes have not been committed. - -.PARAMETER CspellConfigPath -Optional location to use for cspell.json path. Default value is -`./.vscode/cspell.json` - -.PARAMETER ExitWithError -Exit with error code 1 if spelling errors are detected. - -.EXAMPLE -./eng/common/scripts/check-spelling-in-changed-files.ps1 -TargetBranch 'target_branch_name' - -This will run spell check with changes in the current branch with respect to -`target_branch_name` - -#> diff --git a/eng/test_tools.txt b/eng/test_tools.txt index b7f808980878..27baeebf2d50 100644 --- a/eng/test_tools.txt +++ b/eng/test_tools.txt @@ -10,6 +10,7 @@ pytest-xdist==1.32.0 # we pin coverage to 4.5.4 because there is an bug with `pytest-cov`. the generated coverage files cannot be `coverage combine`ed coverage==4.5.4 bandit==1.6.2 +protobuf==3.17.3; python_version == '2.7' # locking packages defined as deps from azure-sdk-tools or azure-devtools pytoml==0.1.21 diff --git a/sdk/azurearcdata/azure-mgmt-azurearcdata/CHANGELOG.md b/sdk/azurearcdata/azure-mgmt-azurearcdata/CHANGELOG.md new file mode 100644 index 000000000000..f24747b3b8cc --- /dev/null +++ b/sdk/azurearcdata/azure-mgmt-azurearcdata/CHANGELOG.md @@ -0,0 +1,5 @@ +# Release History + +## 1.0.0b1 (2021-09-15) + +* Initial Release diff --git a/sdk/azurearcdata/azure-mgmt-azurearcdata/MANIFEST.in b/sdk/azurearcdata/azure-mgmt-azurearcdata/MANIFEST.in new file mode 100644 index 000000000000..3a9b6517412b --- /dev/null +++ b/sdk/azurearcdata/azure-mgmt-azurearcdata/MANIFEST.in @@ -0,0 +1,6 @@ +include _meta.json +recursive-include tests *.py *.yaml +include *.md +include azure/__init__.py +include azure/mgmt/__init__.py + diff --git a/sdk/azurearcdata/azure-mgmt-azurearcdata/README.md b/sdk/azurearcdata/azure-mgmt-azurearcdata/README.md new file mode 100644 index 000000000000..98453a844f83 --- /dev/null +++ b/sdk/azurearcdata/azure-mgmt-azurearcdata/README.md @@ -0,0 +1,27 @@ +# Microsoft Azure SDK for Python + +This is the Microsoft Azure Azurearcdata Management Client Library. +This package has been tested with Python 2.7, 3.6+. +For a more complete view of Azure libraries, see the [azure sdk python release](https://aka.ms/azsdk/python/all). 
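As a quick orientation (not part of this patch), a minimal usage sketch based on the generated client added later in this diff might look as follows. It assumes `azure-identity` is installed to supply `DefaultAzureCredential`, that the synchronous `data_controllers` operation group mirrors the async one included below, and that `DataControllerResource` exposes the standard ARM `name` property:

```py
# Minimal sketch (assumptions noted above): list the Azure Arc data controllers
# visible in a subscription using the generated management client.
from azure.identity import DefaultAzureCredential  # assumed installed separately
from azure.mgmt.azurearcdata import AzureArcDataManagementClient

credential = DefaultAzureCredential()
client = AzureArcDataManagementClient(credential, subscription_id="<subscription-id>")

# list_in_subscription returns a paged iterator of DataControllerResource objects.
for data_controller in client.data_controllers.list_in_subscription():
    print(data_controller.name)  # `name` assumed from the standard ARM resource model
```

The async client in `azure.mgmt.azurearcdata.aio` follows the same shape with `async`/`await`, as the generated operations code further down in this diff shows.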
+ + +# Usage + + +To learn how to use this package, see the [quickstart guide](https://aka.ms/azsdk/python/mgmt) + + + +For docs and references, see [Python SDK References](https://docs.microsoft.com/python/api/overview/azure/) +Code samples for this package can be found at [Azurearcdata Management](https://docs.microsoft.com/samples/browse/?languages=python&term=Getting%20started%20-%20Managing&terms=Getting%20started%20-%20Managing) on docs.microsoft.com. +Additional code samples for different Azure services are available at [Samples Repo](https://aka.ms/azsdk/python/mgmt/samples) + + +# Provide Feedback + +If you encounter any bugs or have suggestions, please file an issue in the +[Issues](https://github.com/Azure/azure-sdk-for-python/issues) +section of the project. + + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-python%2Fazure-mgmt-azurearcdata%2FREADME.png) diff --git a/sdk/azurearcdata/azure-mgmt-azurearcdata/_meta.json b/sdk/azurearcdata/azure-mgmt-azurearcdata/_meta.json new file mode 100644 index 000000000000..39f2055663a1 --- /dev/null +++ b/sdk/azurearcdata/azure-mgmt-azurearcdata/_meta.json @@ -0,0 +1,11 @@ +{ + "autorest": "3.4.5", + "use": [ + "@autorest/python@5.8.4", + "@autorest/modelerfour@4.19.2" + ], + "commit": "e23a590abc2f98437bafac9da5ccd608b2981a45", + "repository_url": "https://github.com/Azure/azure-rest-api-specs", + "autorest_command": "autorest specification/azurearcdata/resource-manager/readme.md --multiapi --python --python-mode=update --python-sdks-folder=/home/vsts/work/1/s/azure-sdk-for-python/sdk --track2 --use=@autorest/python@5.8.4 --use=@autorest/modelerfour@4.19.2 --version=3.4.5", + "readme": "specification/azurearcdata/resource-manager/readme.md" +} \ No newline at end of file diff --git a/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/__init__.py b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/__init__.py new file mode 100644 index 000000000000..8db66d3d0f0f --- /dev/null +++ b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/__init__.py b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/__init__.py new file mode 100644 index 000000000000..8db66d3d0f0f --- /dev/null +++ b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/__init__.py b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/__init__.py new file mode 100644 index 000000000000..34107556bef3 --- /dev/null +++ b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/__init__.py @@ -0,0 +1,19 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from ._azure_arc_data_management_client import AzureArcDataManagementClient +from ._version import VERSION + +__version__ = VERSION +__all__ = ['AzureArcDataManagementClient'] + +try: + from ._patch import patch_sdk # type: ignore + patch_sdk() +except ImportError: + pass diff --git a/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/_azure_arc_data_management_client.py b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/_azure_arc_data_management_client.py new file mode 100644 index 000000000000..8ddeb5be5dc3 --- /dev/null +++ b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/_azure_arc_data_management_client.py @@ -0,0 +1,104 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import TYPE_CHECKING + +from azure.mgmt.core import ARMPipelineClient +from msrest import Deserializer, Serializer + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Optional + + from azure.core.credentials import TokenCredential + from azure.core.pipeline.transport import HttpRequest, HttpResponse + +from ._configuration import AzureArcDataManagementClientConfiguration +from .operations import Operations +from .operations import SqlManagedInstancesOperations +from .operations import SqlServerInstancesOperations +from .operations import DataControllersOperations +from . import models + + +class AzureArcDataManagementClient(object): + """The AzureArcData management API provides a RESTful set of web APIs to manage Azure Data Services on Azure Arc Resources. + + :ivar operations: Operations operations + :vartype operations: azure_arc_data_management_client.operations.Operations + :ivar sql_managed_instances: SqlManagedInstancesOperations operations + :vartype sql_managed_instances: azure_arc_data_management_client.operations.SqlManagedInstancesOperations + :ivar sql_server_instances: SqlServerInstancesOperations operations + :vartype sql_server_instances: azure_arc_data_management_client.operations.SqlServerInstancesOperations + :ivar data_controllers: DataControllersOperations operations + :vartype data_controllers: azure_arc_data_management_client.operations.DataControllersOperations + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials.TokenCredential + :param subscription_id: The ID of the Azure subscription. + :type subscription_id: str + :param str base_url: Service URL + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + """ + + def __init__( + self, + credential, # type: "TokenCredential" + subscription_id, # type: str + base_url=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) 
-> None + if not base_url: + base_url = 'https://management.azure.com' + self._config = AzureArcDataManagementClientConfiguration(credential, subscription_id, **kwargs) + self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._serialize.client_side_validation = False + self._deserialize = Deserializer(client_models) + + self.operations = Operations( + self._client, self._config, self._serialize, self._deserialize) + self.sql_managed_instances = SqlManagedInstancesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.sql_server_instances = SqlServerInstancesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.data_controllers = DataControllersOperations( + self._client, self._config, self._serialize, self._deserialize) + + def _send_request(self, http_request, **kwargs): + # type: (HttpRequest, Any) -> HttpResponse + """Runs the network request through the client's chained policies. + + :param http_request: The network request you want to make. Required. + :type http_request: ~azure.core.pipeline.transport.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to True. + :return: The response of your network call. Does not do error handling on your response. + :rtype: ~azure.core.pipeline.transport.HttpResponse + """ + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + http_request.url = self._client.format_url(http_request.url, **path_format_arguments) + stream = kwargs.pop("stream", True) + pipeline_response = self._client._pipeline.run(http_request, stream=stream, **kwargs) + return pipeline_response.http_response + + def close(self): + # type: () -> None + self._client.close() + + def __enter__(self): + # type: () -> AzureArcDataManagementClient + self._client.__enter__() + return self + + def __exit__(self, *exc_details): + # type: (Any) -> None + self._client.__exit__(*exc_details) diff --git a/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/_configuration.py b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/_configuration.py new file mode 100644 index 000000000000..06e0bd8f8958 --- /dev/null +++ b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/_configuration.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import TYPE_CHECKING + +from azure.core.configuration import Configuration +from azure.core.pipeline import policies +from azure.mgmt.core.policies import ARMHttpLoggingPolicy + +from ._version import VERSION + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any + + from azure.core.credentials import TokenCredential + + +class AzureArcDataManagementClientConfiguration(Configuration): + """Configuration for AzureArcDataManagementClient. 
+ + Note that all parameters used to create this instance are saved as instance + attributes. + + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials.TokenCredential + :param subscription_id: The ID of the Azure subscription. + :type subscription_id: str + """ + + def __init__( + self, + credential, # type: "TokenCredential" + subscription_id, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + if subscription_id is None: + raise ValueError("Parameter 'subscription_id' must not be None.") + super(AzureArcDataManagementClientConfiguration, self).__init__(**kwargs) + + self.credential = credential + self.subscription_id = subscription_id + self.api_version = "2021-08-01" + self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) + kwargs.setdefault('sdk_moniker', 'mgmt-azurearcdata/{}'.format(VERSION)) + self._configure(**kwargs) + + def _configure( + self, + **kwargs # type: Any + ): + # type: (...) -> None + self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get('authentication_policy') + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/_metadata.json b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/_metadata.json new file mode 100644 index 000000000000..cf806c606565 --- /dev/null +++ b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/_metadata.json @@ -0,0 +1,106 @@ +{ + "chosen_version": "2021-08-01", + "total_api_version_list": ["2021-08-01"], + "client": { + "name": "AzureArcDataManagementClient", + "filename": "_azure_arc_data_management_client", + "description": "The AzureArcData management API provides a RESTful set of web APIs to manage Azure Data Services on Azure Arc Resources.", + "base_url": "\u0027https://management.azure.com\u0027", + "custom_base_url": null, + "azure_arm": true, + "has_lro_operations": true, + "client_side_validation": false, + "sync_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials\": [\"TokenCredential\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", \"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"msrest\": [\"Deserializer\", \"Serializer\"], \"azure.mgmt.core\": [\"ARMPipelineClient\"]}, \"local\": {\"._configuration\": [\"AzureArcDataManagementClientConfiguration\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}, \"azurecore\": {\"azure.core.pipeline.transport\": [\"HttpRequest\", 
\"HttpResponse\"]}}}", + "async_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials_async\": [\"AsyncTokenCredential\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", \"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"msrest\": [\"Deserializer\", \"Serializer\"], \"azure.mgmt.core\": [\"AsyncARMPipelineClient\"]}, \"local\": {\"._configuration\": [\"AzureArcDataManagementClientConfiguration\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}, \"azurecore\": {\"azure.core.pipeline.transport\": [\"AsyncHttpResponse\", \"HttpRequest\"]}}}" + }, + "global_parameters": { + "sync": { + "credential": { + "signature": "credential, # type: \"TokenCredential\"", + "description": "Credential needed for the client to connect to Azure.", + "docstring_type": "~azure.core.credentials.TokenCredential", + "required": true + }, + "subscription_id": { + "signature": "subscription_id, # type: str", + "description": "The ID of the Azure subscription.", + "docstring_type": "str", + "required": true + } + }, + "async": { + "credential": { + "signature": "credential: \"AsyncTokenCredential\",", + "description": "Credential needed for the client to connect to Azure.", + "docstring_type": "~azure.core.credentials_async.AsyncTokenCredential", + "required": true + }, + "subscription_id": { + "signature": "subscription_id: str,", + "description": "The ID of the Azure subscription.", + "docstring_type": "str", + "required": true + } + }, + "constant": { + }, + "call": "credential, subscription_id", + "service_client_specific": { + "sync": { + "api_version": { + "signature": "api_version=None, # type: Optional[str]", + "description": "API version to use if no profile is provided, or if missing in profile.", + "docstring_type": "str", + "required": false + }, + "base_url": { + "signature": "base_url=None, # type: Optional[str]", + "description": "Service URL", + "docstring_type": "str", + "required": false + }, + "profile": { + "signature": "profile=KnownProfiles.default, # type: KnownProfiles", + "description": "A profile definition, from KnownProfiles to dict.", + "docstring_type": "azure.profiles.KnownProfiles", + "required": false + } + }, + "async": { + "api_version": { + "signature": "api_version: Optional[str] = None,", + "description": "API version to use if no profile is provided, or if missing in profile.", + "docstring_type": "str", + "required": false + }, + "base_url": { + "signature": "base_url: Optional[str] = None,", + "description": "Service URL", + "docstring_type": "str", + "required": false + }, + "profile": { + "signature": "profile: KnownProfiles = KnownProfiles.default,", + "description": "A profile definition, from KnownProfiles to dict.", + "docstring_type": "azure.profiles.KnownProfiles", + "required": false + } + } + } + }, + "config": { + "credential": true, + "credential_scopes": ["https://management.azure.com/.default"], + "credential_default_policy_type": "BearerTokenCredentialPolicy", + "credential_default_policy_type_has_async_version": true, + "credential_key_header_name": null, + "sync_imports": "{\"regular\": {\"azurecore\": {\"azure.core.configuration\": [\"Configuration\"], \"azure.core.pipeline\": [\"policies\"], \"azure.mgmt.core.policies\": [\"ARMHttpLoggingPolicy\"]}, \"local\": {\"._version\": [\"VERSION\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\"]}}, \"typing\": {\"azurecore\": {\"azure.core.credentials\": [\"TokenCredential\"]}}}", + "async_imports": "{\"regular\": 
{\"azurecore\": {\"azure.core.configuration\": [\"Configuration\"], \"azure.core.pipeline\": [\"policies\"], \"azure.mgmt.core.policies\": [\"ARMHttpLoggingPolicy\"]}, \"local\": {\".._version\": [\"VERSION\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\"]}}, \"typing\": {\"azurecore\": {\"azure.core.credentials_async\": [\"AsyncTokenCredential\"]}}}" + }, + "operation_groups": { + "operations": "Operations", + "sql_managed_instances": "SqlManagedInstancesOperations", + "sql_server_instances": "SqlServerInstancesOperations", + "data_controllers": "DataControllersOperations" + } +} \ No newline at end of file diff --git a/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/_version.py b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/_version.py new file mode 100644 index 000000000000..e5754a47ce68 --- /dev/null +++ b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/_version.py @@ -0,0 +1,9 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +VERSION = "1.0.0b1" diff --git a/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/aio/__init__.py b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/aio/__init__.py new file mode 100644 index 000000000000..57b761c5d123 --- /dev/null +++ b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/aio/__init__.py @@ -0,0 +1,10 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._azure_arc_data_management_client import AzureArcDataManagementClient +__all__ = ['AzureArcDataManagementClient'] diff --git a/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/aio/_azure_arc_data_management_client.py b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/aio/_azure_arc_data_management_client.py new file mode 100644 index 000000000000..c134ef59a1b8 --- /dev/null +++ b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/aio/_azure_arc_data_management_client.py @@ -0,0 +1,97 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from typing import Any, Optional, TYPE_CHECKING + +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core import AsyncARMPipelineClient +from msrest import Deserializer, Serializer + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential + +from ._configuration import AzureArcDataManagementClientConfiguration +from .operations import Operations +from .operations import SqlManagedInstancesOperations +from .operations import SqlServerInstancesOperations +from .operations import DataControllersOperations +from .. import models + + +class AzureArcDataManagementClient(object): + """The AzureArcData management API provides a RESTful set of web APIs to manage Azure Data Services on Azure Arc Resources. + + :ivar operations: Operations operations + :vartype operations: azure_arc_data_management_client.aio.operations.Operations + :ivar sql_managed_instances: SqlManagedInstancesOperations operations + :vartype sql_managed_instances: azure_arc_data_management_client.aio.operations.SqlManagedInstancesOperations + :ivar sql_server_instances: SqlServerInstancesOperations operations + :vartype sql_server_instances: azure_arc_data_management_client.aio.operations.SqlServerInstancesOperations + :ivar data_controllers: DataControllersOperations operations + :vartype data_controllers: azure_arc_data_management_client.aio.operations.DataControllersOperations + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param subscription_id: The ID of the Azure subscription. + :type subscription_id: str + :param str base_url: Service URL + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + """ + + def __init__( + self, + credential: "AsyncTokenCredential", + subscription_id: str, + base_url: Optional[str] = None, + **kwargs: Any + ) -> None: + if not base_url: + base_url = 'https://management.azure.com' + self._config = AzureArcDataManagementClientConfiguration(credential, subscription_id, **kwargs) + self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._serialize.client_side_validation = False + self._deserialize = Deserializer(client_models) + + self.operations = Operations( + self._client, self._config, self._serialize, self._deserialize) + self.sql_managed_instances = SqlManagedInstancesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.sql_server_instances = SqlServerInstancesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.data_controllers = DataControllersOperations( + self._client, self._config, self._serialize, self._deserialize) + + async def _send_request(self, http_request: HttpRequest, **kwargs: Any) -> AsyncHttpResponse: + """Runs the network request through the client's chained policies. + + :param http_request: The network request you want to make. Required. + :type http_request: ~azure.core.pipeline.transport.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to True. + :return: The response of your network call. 
Does not do error handling on your response. + :rtype: ~azure.core.pipeline.transport.AsyncHttpResponse + """ + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + http_request.url = self._client.format_url(http_request.url, **path_format_arguments) + stream = kwargs.pop("stream", True) + pipeline_response = await self._client._pipeline.run(http_request, stream=stream, **kwargs) + return pipeline_response.http_response + + async def close(self) -> None: + await self._client.close() + + async def __aenter__(self) -> "AzureArcDataManagementClient": + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details) -> None: + await self._client.__aexit__(*exc_details) diff --git a/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/aio/_configuration.py b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/aio/_configuration.py new file mode 100644 index 000000000000..dbff5e916631 --- /dev/null +++ b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/aio/_configuration.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING + +from azure.core.configuration import Configuration +from azure.core.pipeline import policies +from azure.mgmt.core.policies import ARMHttpLoggingPolicy + +from .._version import VERSION + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential + + +class AzureArcDataManagementClientConfiguration(Configuration): + """Configuration for AzureArcDataManagementClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param subscription_id: The ID of the Azure subscription. 
+ :type subscription_id: str + """ + + def __init__( + self, + credential: "AsyncTokenCredential", + subscription_id: str, + **kwargs: Any + ) -> None: + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + if subscription_id is None: + raise ValueError("Parameter 'subscription_id' must not be None.") + super(AzureArcDataManagementClientConfiguration, self).__init__(**kwargs) + + self.credential = credential + self.subscription_id = subscription_id + self.api_version = "2021-08-01" + self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) + kwargs.setdefault('sdk_moniker', 'mgmt-azurearcdata/{}'.format(VERSION)) + self._configure(**kwargs) + + def _configure( + self, + **kwargs: Any + ) -> None: + self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get('authentication_policy') + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/aio/operations/__init__.py b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/aio/operations/__init__.py new file mode 100644 index 000000000000..71f1823a000e --- /dev/null +++ b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/aio/operations/__init__.py @@ -0,0 +1,19 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from ._operations import Operations +from ._sql_managed_instances_operations import SqlManagedInstancesOperations +from ._sql_server_instances_operations import SqlServerInstancesOperations +from ._data_controllers_operations import DataControllersOperations + +__all__ = [ + 'Operations', + 'SqlManagedInstancesOperations', + 'SqlServerInstancesOperations', + 'DataControllersOperations', +] diff --git a/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/aio/operations/_data_controllers_operations.py b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/aio/operations/_data_controllers_operations.py new file mode 100644 index 000000000000..a2358f6cad23 --- /dev/null +++ b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/aio/operations/_data_controllers_operations.py @@ -0,0 +1,550 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class DataControllersOperations: + """DataControllersOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_arc_data_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_in_subscription( + self, + **kwargs: Any + ) -> AsyncIterable["_models.PageOfDataControllerResource"]: + """List dataController resources in the subscription. + + List dataController resources in the subscription. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either PageOfDataControllerResource or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_arc_data_management_client.models.PageOfDataControllerResource] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PageOfDataControllerResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_in_subscription.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('PageOfDataControllerResource', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_in_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.AzureArcData/dataControllers'} # type: ignore + + def list_in_group( + self, + resource_group_name: str, + **kwargs: Any + ) -> AsyncIterable["_models.PageOfDataControllerResource"]: + """List dataController resources in the resource group. + + List dataController resources in the resource group. + + :param resource_group_name: The name of the Azure resource group. 
+ :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either PageOfDataControllerResource or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_arc_data_management_client.models.PageOfDataControllerResource] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PageOfDataControllerResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_in_group.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('PageOfDataControllerResource', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_in_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/dataControllers'} # type: ignore + + async def _put_data_controller_initial( + self, + resource_group_name: str, + data_controller_name: str, + data_controller_resource: "_models.DataControllerResource", + **kwargs: Any + ) -> "_models.DataControllerResource": + cls = kwargs.pop('cls', None) # type: ClsType["_models.DataControllerResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._put_data_controller_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 
'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'dataControllerName': self._serialize.url("data_controller_name", data_controller_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(data_controller_resource, 'DataControllerResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('DataControllerResource', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('DataControllerResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _put_data_controller_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/dataControllers/{dataControllerName}'} # type: ignore + + async def begin_put_data_controller( + self, + resource_group_name: str, + data_controller_name: str, + data_controller_resource: "_models.DataControllerResource", + **kwargs: Any + ) -> AsyncLROPoller["_models.DataControllerResource"]: + """Creates or replaces a dataController resource. + + :param resource_group_name: The name of the Azure resource group. + :type resource_group_name: str + :param data_controller_name: + :type data_controller_name: str + :param data_controller_resource: desc. + :type data_controller_resource: ~azure_arc_data_management_client.models.DataControllerResource + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either DataControllerResource or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure_arc_data_management_client.models.DataControllerResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.DataControllerResource"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._put_data_controller_initial( + resource_group_name=resource_group_name, + data_controller_name=data_controller_name, + data_controller_resource=data_controller_resource, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('DataControllerResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'dataControllerName': self._serialize.url("data_controller_name", data_controller_name, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_put_data_controller.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/dataControllers/{dataControllerName}'} # type: ignore + + async def _delete_data_controller_initial( + self, + resource_group_name: str, + data_controller_name: str, + **kwargs: Any + ) -> None: + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + accept = "application/json" + + # Construct URL + url = self._delete_data_controller_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'dataControllerName': self._serialize.url("data_controller_name", data_controller_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, 
header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_data_controller_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/dataControllers/{dataControllerName}'} # type: ignore + + async def begin_delete_data_controller( + self, + resource_group_name: str, + data_controller_name: str, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Deletes a dataController resource. + + :param resource_group_name: The name of the Azure resource group. + :type resource_group_name: str + :param data_controller_name: + :type data_controller_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_data_controller_initial( + resource_group_name=resource_group_name, + data_controller_name=data_controller_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'dataControllerName': self._serialize.url("data_controller_name", data_controller_name, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete_data_controller.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/dataControllers/{dataControllerName}'} # type: ignore + + async def get_data_controller( + self, + resource_group_name: str, + data_controller_name: str, + **kwargs: Any + ) -> "_models.DataControllerResource": + """Retrieves a dataController resource. + + :param resource_group_name: The name of the Azure resource group. + :type resource_group_name: str + :param data_controller_name: + :type data_controller_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataControllerResource, or the result of cls(response) + :rtype: ~azure_arc_data_management_client.models.DataControllerResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.DataControllerResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + accept = "application/json" + + # Construct URL + url = self.get_data_controller.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'dataControllerName': self._serialize.url("data_controller_name", data_controller_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('DataControllerResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_data_controller.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/dataControllers/{dataControllerName}'} # type: ignore + + async def patch_data_controller( + self, + resource_group_name: str, + data_controller_name: str, + data_controller_resource: "_models.DataControllerUpdate", + **kwargs: Any + ) -> "_models.DataControllerResource": + """Updates a dataController resource. + + :param resource_group_name: The name of the Azure resource group. + :type resource_group_name: str + :param data_controller_name: + :type data_controller_name: str + :param data_controller_resource: The update data controller resource. 
+ :type data_controller_resource: ~azure_arc_data_management_client.models.DataControllerUpdate + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataControllerResource, or the result of cls(response) + :rtype: ~azure_arc_data_management_client.models.DataControllerResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.DataControllerResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.patch_data_controller.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'dataControllerName': self._serialize.url("data_controller_name", data_controller_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(data_controller_resource, 'DataControllerUpdate') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('DataControllerResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + patch_data_controller.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/dataControllers/{dataControllerName}'} # type: ignore diff --git a/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/aio/operations/_operations.py b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/aio/operations/_operations.py new file mode 100644 index 000000000000..90194487b71b --- /dev/null +++ b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/aio/operations/_operations.py @@ -0,0 +1,105 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class Operations: + """Operations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_arc_data_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs: Any + ) -> AsyncIterable["_models.OperationListResult"]: + """Lists all of the available Azure Data Services on Azure Arc API operations. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either OperationListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_arc_data_management_client.models.OperationListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('OperationListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = 
self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/providers/Microsoft.AzureArcData/operations'} # type: ignore diff --git a/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/aio/operations/_sql_managed_instances_operations.py b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/aio/operations/_sql_managed_instances_operations.py new file mode 100644 index 000000000000..ba47da5e7381 --- /dev/null +++ b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/aio/operations/_sql_managed_instances_operations.py @@ -0,0 +1,550 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class SqlManagedInstancesOperations: + """SqlManagedInstancesOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_arc_data_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs: Any + ) -> AsyncIterable["_models.SqlManagedInstanceListResult"]: + """List sqlManagedInstance resources in the subscription. + + List sqlManagedInstance resources in the subscription. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either SqlManagedInstanceListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_arc_data_management_client.models.SqlManagedInstanceListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.SqlManagedInstanceListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('SqlManagedInstanceListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.AzureArcData/sqlManagedInstances'} # type: ignore + + def list_by_resource_group( + self, + resource_group_name: str, + **kwargs: Any + ) -> AsyncIterable["_models.SqlManagedInstanceListResult"]: + """List sqlManagedInstance resources in the resource group. + + Gets all sqlManagedInstances in a resource group. + + :param resource_group_name: The name of the Azure resource group. 
+ :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either SqlManagedInstanceListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_arc_data_management_client.models.SqlManagedInstanceListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.SqlManagedInstanceListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_resource_group.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('SqlManagedInstanceListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/sqlManagedInstances'} # type: ignore + + async def get( + self, + resource_group_name: str, + sql_managed_instance_name: str, + **kwargs: Any + ) -> "_models.SqlManagedInstance": + """Retrieves a SQL Managed Instance resource. + + :param resource_group_name: The name of the Azure resource group. + :type resource_group_name: str + :param sql_managed_instance_name: Name of SQL Managed Instance. 
+ :type sql_managed_instance_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SqlManagedInstance, or the result of cls(response) + :rtype: ~azure_arc_data_management_client.models.SqlManagedInstance + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.SqlManagedInstance"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'sqlManagedInstanceName': self._serialize.url("sql_managed_instance_name", sql_managed_instance_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('SqlManagedInstance', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/sqlManagedInstances/{sqlManagedInstanceName}'} # type: ignore + + async def _create_initial( + self, + resource_group_name: str, + sql_managed_instance_name: str, + sql_managed_instance: "_models.SqlManagedInstance", + **kwargs: Any + ) -> "_models.SqlManagedInstance": + cls = kwargs.pop('cls', None) # type: ClsType["_models.SqlManagedInstance"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._create_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'sqlManagedInstanceName': self._serialize.url("sql_managed_instance_name", sql_managed_instance_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, 
Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(sql_managed_instance, 'SqlManagedInstance') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('SqlManagedInstance', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('SqlManagedInstance', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/sqlManagedInstances/{sqlManagedInstanceName}'} # type: ignore + + async def begin_create( + self, + resource_group_name: str, + sql_managed_instance_name: str, + sql_managed_instance: "_models.SqlManagedInstance", + **kwargs: Any + ) -> AsyncLROPoller["_models.SqlManagedInstance"]: + """Creates or replaces a SQL Managed Instance resource. + + :param resource_group_name: The name of the Azure resource group. + :type resource_group_name: str + :param sql_managed_instance_name: The name of SQL Managed Instances. + :type sql_managed_instance_name: str + :param sql_managed_instance: The SQL Managed Instance to be created or updated. + :type sql_managed_instance: ~azure_arc_data_management_client.models.SqlManagedInstance + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either SqlManagedInstance or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure_arc_data_management_client.models.SqlManagedInstance] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.SqlManagedInstance"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_initial( + resource_group_name=resource_group_name, + sql_managed_instance_name=sql_managed_instance_name, + sql_managed_instance=sql_managed_instance, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('SqlManagedInstance', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'sqlManagedInstanceName': self._serialize.url("sql_managed_instance_name", sql_managed_instance_name, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/sqlManagedInstances/{sqlManagedInstanceName}'} # type: ignore + + async def _delete_initial( + self, + resource_group_name: str, + sql_managed_instance_name: str, + **kwargs: Any + ) -> None: + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + accept = "application/json" + + # Construct URL + url = self._delete_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'sqlManagedInstanceName': self._serialize.url("sql_managed_instance_name", sql_managed_instance_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + 
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/sqlManagedInstances/{sqlManagedInstanceName}'} # type: ignore + + async def begin_delete( + self, + resource_group_name: str, + sql_managed_instance_name: str, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Deletes a SQL Managed Instance resource. + + :param resource_group_name: The name of the Azure resource group. + :type resource_group_name: str + :param sql_managed_instance_name: The name of Sql Managed Instances. + :type sql_managed_instance_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + sql_managed_instance_name=sql_managed_instance_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'sqlManagedInstanceName': self._serialize.url("sql_managed_instance_name", sql_managed_instance_name, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/sqlManagedInstances/{sqlManagedInstanceName}'} # type: ignore + + async def update( + self, + resource_group_name: str, + sql_managed_instance_name: str, + parameters: "_models.SqlManagedInstanceUpdate", + **kwargs: Any + ) -> "_models.SqlManagedInstance": + """Updates a SQL Managed Instance resource. + + :param resource_group_name: The name of the Azure resource group. + :type resource_group_name: str + :param sql_managed_instance_name: Name of sqlManagedInstance. + :type sql_managed_instance_name: str + :param parameters: The SQL Managed Instance. + :type parameters: ~azure_arc_data_management_client.models.SqlManagedInstanceUpdate + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SqlManagedInstance, or the result of cls(response) + :rtype: ~azure_arc_data_management_client.models.SqlManagedInstance + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.SqlManagedInstance"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'sqlManagedInstanceName': self._serialize.url("sql_managed_instance_name", sql_managed_instance_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'SqlManagedInstanceUpdate') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('SqlManagedInstance', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/sqlManagedInstances/{sqlManagedInstanceName}'} # type: ignore diff --git a/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/aio/operations/_sql_server_instances_operations.py 
b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/aio/operations/_sql_server_instances_operations.py new file mode 100644 index 000000000000..31bb1b0649ba --- /dev/null +++ b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/aio/operations/_sql_server_instances_operations.py @@ -0,0 +1,550 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class SqlServerInstancesOperations: + """SqlServerInstancesOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_arc_data_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs: Any + ) -> AsyncIterable["_models.SqlServerInstanceListResult"]: + """List sqlServerInstance resources in the subscription. + + List sqlServerInstance resources in the subscription. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either SqlServerInstanceListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_arc_data_management_client.models.SqlServerInstanceListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.SqlServerInstanceListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('SqlServerInstanceListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.AzureArcData/sqlServerInstances'} # type: ignore + + def list_by_resource_group( + self, + resource_group_name: str, + **kwargs: Any + ) -> AsyncIterable["_models.SqlServerInstanceListResult"]: + """List sqlServerInstance resources in the resource group. + + Gets all sqlServerInstances in a resource group. + + :param resource_group_name: The name of the Azure resource group. 
+ :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either SqlServerInstanceListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_arc_data_management_client.models.SqlServerInstanceListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.SqlServerInstanceListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_resource_group.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('SqlServerInstanceListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/sqlServerInstances'} # type: ignore + + async def get( + self, + resource_group_name: str, + sql_server_instance_name: str, + **kwargs: Any + ) -> "_models.SqlServerInstance": + """Retrieves a SQL Server Instance resource. + + :param resource_group_name: The name of the Azure resource group. + :type resource_group_name: str + :param sql_server_instance_name: Name of SQL Server Instance. 
+ :type sql_server_instance_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SqlServerInstance, or the result of cls(response) + :rtype: ~azure_arc_data_management_client.models.SqlServerInstance + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.SqlServerInstance"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'sqlServerInstanceName': self._serialize.url("sql_server_instance_name", sql_server_instance_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('SqlServerInstance', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/sqlServerInstances/{sqlServerInstanceName}'} # type: ignore + + async def _create_initial( + self, + resource_group_name: str, + sql_server_instance_name: str, + sql_server_instance: "_models.SqlServerInstance", + **kwargs: Any + ) -> "_models.SqlServerInstance": + cls = kwargs.pop('cls', None) # type: ClsType["_models.SqlServerInstance"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._create_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'sqlServerInstanceName': self._serialize.url("sql_server_instance_name", sql_server_instance_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + 
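+        # Both the request body and the expected response are JSON, so Content-Type and
+        # Accept are populated from the content_type/accept values resolved above.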
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(sql_server_instance, 'SqlServerInstance') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('SqlServerInstance', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('SqlServerInstance', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/sqlServerInstances/{sqlServerInstanceName}'} # type: ignore + + async def begin_create( + self, + resource_group_name: str, + sql_server_instance_name: str, + sql_server_instance: "_models.SqlServerInstance", + **kwargs: Any + ) -> AsyncLROPoller["_models.SqlServerInstance"]: + """Creates or replaces a SQL Server Instance resource. + + :param resource_group_name: The name of the Azure resource group. + :type resource_group_name: str + :param sql_server_instance_name: The name of SQL Server Instance. + :type sql_server_instance_name: str + :param sql_server_instance: The SQL Server Instance to be created or updated. + :type sql_server_instance: ~azure_arc_data_management_client.models.SqlServerInstance + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either SqlServerInstance or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure_arc_data_management_client.models.SqlServerInstance] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.SqlServerInstance"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_initial( + resource_group_name=resource_group_name, + sql_server_instance_name=sql_server_instance_name, + sql_server_instance=sql_server_instance, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('SqlServerInstance', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'sqlServerInstanceName': self._serialize.url("sql_server_instance_name", sql_server_instance_name, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/sqlServerInstances/{sqlServerInstanceName}'} # type: ignore + + async def _delete_initial( + self, + resource_group_name: str, + sql_server_instance_name: str, + **kwargs: Any + ) -> None: + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + accept = "application/json" + + # Construct URL + url = self._delete_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'sqlServerInstanceName': self._serialize.url("sql_server_instance_name", sql_server_instance_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await 
self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/sqlServerInstances/{sqlServerInstanceName}'} # type: ignore + + async def begin_delete( + self, + resource_group_name: str, + sql_server_instance_name: str, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Deletes a SQL Server Instance resource. + + :param resource_group_name: The name of the Azure resource group. + :type resource_group_name: str + :param sql_server_instance_name: The name of SQL Server Instance. + :type sql_server_instance_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + sql_server_instance_name=sql_server_instance_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'sqlServerInstanceName': self._serialize.url("sql_server_instance_name", sql_server_instance_name, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/sqlServerInstances/{sqlServerInstanceName}'} # type: ignore + + async def update( + self, + resource_group_name: str, + sql_server_instance_name: str, + parameters: "_models.SqlServerInstanceUpdate", + **kwargs: Any + ) -> "_models.SqlServerInstance": + """Updates a SQL Server Instance resource. + + :param resource_group_name: The name of the Azure resource group. + :type resource_group_name: str + :param sql_server_instance_name: Name of sqlServerInstance. + :type sql_server_instance_name: str + :param parameters: The SQL Server Instance. + :type parameters: ~azure_arc_data_management_client.models.SqlServerInstanceUpdate + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SqlServerInstance, or the result of cls(response) + :rtype: ~azure_arc_data_management_client.models.SqlServerInstance + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.SqlServerInstance"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'sqlServerInstanceName': self._serialize.url("sql_server_instance_name", sql_server_instance_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'SqlServerInstanceUpdate') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('SqlServerInstance', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/sqlServerInstances/{sqlServerInstanceName}'} # type: ignore diff --git a/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/models/__init__.py b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/models/__init__.py new file mode 100644 index 000000000000..be72068b045e 
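The async operations above follow the standard generated long-running-operation pattern: `begin_create` and `begin_delete` return an `AsyncLROPoller`, while `get` and `update` return the deserialized `SqlServerInstance` directly. A minimal usage sketch follows; it assumes the generated async client is exposed as `azure.mgmt.azurearcdata.aio.AzureArcDataManagementClient` with a `sql_server_instances` operations group, and every resource name and property value is a placeholder.

```py
# Minimal sketch only; assumes the async client lives at
# azure.mgmt.azurearcdata.aio.AzureArcDataManagementClient and exposes a
# `sql_server_instances` operations group. All names/values are placeholders.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.azurearcdata.aio import AzureArcDataManagementClient
from azure.mgmt.azurearcdata import models


async def main() -> None:
    credential = DefaultAzureCredential()
    async with AzureArcDataManagementClient(credential, "<subscription-id>") as client:
        # begin_create returns an AsyncLROPoller; await .result() for the final resource.
        instance = models.SqlServerInstance(
            location="eastus",
            properties=models.SqlServerInstanceProperties(
                container_resource_id="<arc-enabled-server-resource-id>",
                status="Connected",
            ),
        )
        poller = await client.sql_server_instances.begin_create(
            resource_group_name="<resource-group>",
            sql_server_instance_name="<instance-name>",
            sql_server_instance=instance,
        )
        created = await poller.result()
        print(created.name)

        # get and update return the deserialized model directly.
        fetched = await client.sql_server_instances.get("<resource-group>", "<instance-name>")
        updated = await client.sql_server_instances.update(
            "<resource-group>",
            "<instance-name>",
            models.SqlServerInstanceUpdate(tags={"env": "test"}),
        )
        print(fetched.id, updated.tags)

        # begin_delete also returns a poller; the operation has no response body.
        delete_poller = await client.sql_server_instances.begin_delete(
            "<resource-group>", "<instance-name>"
        )
        await delete_poller.result()

    await credential.close()


if __name__ == "__main__":
    asyncio.run(main())
```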
--- /dev/null +++ b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/models/__init__.py @@ -0,0 +1,152 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +try: + from ._models_py3 import BasicLoginInformation + from ._models_py3 import CommonSku + from ._models_py3 import DataControllerProperties + from ._models_py3 import DataControllerResource + from ._models_py3 import DataControllerUpdate + from ._models_py3 import ErrorResponse + from ._models_py3 import ErrorResponseBody + from ._models_py3 import ExtendedLocation + from ._models_py3 import Identity + from ._models_py3 import K8SResourceRequirements + from ._models_py3 import K8SScheduling + from ._models_py3 import K8SSchedulingOptions + from ._models_py3 import LogAnalyticsWorkspaceConfig + from ._models_py3 import ODataError + from ._models_py3 import OnPremiseProperty + from ._models_py3 import Operation + from ._models_py3 import OperationDisplay + from ._models_py3 import OperationListResult + from ._models_py3 import PageOfDataControllerResource + from ._models_py3 import Plan + from ._models_py3 import ProxyResource + from ._models_py3 import Resource + from ._models_py3 import ResourceSku + from ._models_py3 import SqlManagedInstance + from ._models_py3 import SqlManagedInstanceK8SRaw + from ._models_py3 import SqlManagedInstanceK8SSpec + from ._models_py3 import SqlManagedInstanceListResult + from ._models_py3 import SqlManagedInstanceProperties + from ._models_py3 import SqlManagedInstanceSku + from ._models_py3 import SqlManagedInstanceUpdate + from ._models_py3 import SqlServerInstance + from ._models_py3 import SqlServerInstanceListResult + from ._models_py3 import SqlServerInstanceProperties + from ._models_py3 import SqlServerInstanceUpdate + from ._models_py3 import SystemData + from ._models_py3 import TrackedResource + from ._models_py3 import UploadServicePrincipal + from ._models_py3 import UploadWatermark +except (SyntaxError, ImportError): + from ._models import BasicLoginInformation # type: ignore + from ._models import CommonSku # type: ignore + from ._models import DataControllerProperties # type: ignore + from ._models import DataControllerResource # type: ignore + from ._models import DataControllerUpdate # type: ignore + from ._models import ErrorResponse # type: ignore + from ._models import ErrorResponseBody # type: ignore + from ._models import ExtendedLocation # type: ignore + from ._models import Identity # type: ignore + from ._models import K8SResourceRequirements # type: ignore + from ._models import K8SScheduling # type: ignore + from ._models import K8SSchedulingOptions # type: ignore + from ._models import LogAnalyticsWorkspaceConfig # type: ignore + from ._models import ODataError # type: ignore + from ._models import OnPremiseProperty # type: ignore + from ._models import Operation # type: ignore + from ._models import OperationDisplay # type: ignore + from ._models import OperationListResult # type: ignore + from ._models import PageOfDataControllerResource # type: ignore + from ._models import Plan # type: ignore + from ._models import ProxyResource # 
type: ignore + from ._models import Resource # type: ignore + from ._models import ResourceSku # type: ignore + from ._models import SqlManagedInstance # type: ignore + from ._models import SqlManagedInstanceK8SRaw # type: ignore + from ._models import SqlManagedInstanceK8SSpec # type: ignore + from ._models import SqlManagedInstanceListResult # type: ignore + from ._models import SqlManagedInstanceProperties # type: ignore + from ._models import SqlManagedInstanceSku # type: ignore + from ._models import SqlManagedInstanceUpdate # type: ignore + from ._models import SqlServerInstance # type: ignore + from ._models import SqlServerInstanceListResult # type: ignore + from ._models import SqlServerInstanceProperties # type: ignore + from ._models import SqlServerInstanceUpdate # type: ignore + from ._models import SystemData # type: ignore + from ._models import TrackedResource # type: ignore + from ._models import UploadServicePrincipal # type: ignore + from ._models import UploadWatermark # type: ignore + +from ._azure_arc_data_management_client_enums import ( + ArcSqlManagedInstanceLicenseType, + ArcSqlServerLicenseType, + ConnectionStatus, + DefenderStatus, + EditionType, + ExtendedLocationTypes, + IdentityType, + Infrastructure, + OperationOrigin, + SqlManagedInstanceSkuTier, + SqlVersion, +) + +__all__ = [ + 'BasicLoginInformation', + 'CommonSku', + 'DataControllerProperties', + 'DataControllerResource', + 'DataControllerUpdate', + 'ErrorResponse', + 'ErrorResponseBody', + 'ExtendedLocation', + 'Identity', + 'K8SResourceRequirements', + 'K8SScheduling', + 'K8SSchedulingOptions', + 'LogAnalyticsWorkspaceConfig', + 'ODataError', + 'OnPremiseProperty', + 'Operation', + 'OperationDisplay', + 'OperationListResult', + 'PageOfDataControllerResource', + 'Plan', + 'ProxyResource', + 'Resource', + 'ResourceSku', + 'SqlManagedInstance', + 'SqlManagedInstanceK8SRaw', + 'SqlManagedInstanceK8SSpec', + 'SqlManagedInstanceListResult', + 'SqlManagedInstanceProperties', + 'SqlManagedInstanceSku', + 'SqlManagedInstanceUpdate', + 'SqlServerInstance', + 'SqlServerInstanceListResult', + 'SqlServerInstanceProperties', + 'SqlServerInstanceUpdate', + 'SystemData', + 'TrackedResource', + 'UploadServicePrincipal', + 'UploadWatermark', + 'ArcSqlManagedInstanceLicenseType', + 'ArcSqlServerLicenseType', + 'ConnectionStatus', + 'DefenderStatus', + 'EditionType', + 'ExtendedLocationTypes', + 'IdentityType', + 'Infrastructure', + 'OperationOrigin', + 'SqlManagedInstanceSkuTier', + 'SqlVersion', +] diff --git a/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/models/_azure_arc_data_management_client_enums.py b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/models/_azure_arc_data_management_client_enums.py new file mode 100644 index 000000000000..fac1c2934e44 --- /dev/null +++ b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/models/_azure_arc_data_management_client_enums.py @@ -0,0 +1,118 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from enum import Enum, EnumMeta +from six import with_metaclass + +class _CaseInsensitiveEnumMeta(EnumMeta): + def __getitem__(self, name): + return super().__getitem__(name.upper()) + + def __getattr__(cls, name): + """Return the enum member matching `name` + We use __getattr__ instead of descriptors or inserting into the enum + class' __dict__ in order to support `name` and `value` being both + properties for enum members (which live in the class' __dict__) and + enum members themselves. + """ + try: + return cls._member_map_[name.upper()] + except KeyError: + raise AttributeError(name) + + +class ArcSqlManagedInstanceLicenseType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The license type to apply for this managed instance. + """ + + BASE_PRICE = "BasePrice" + LICENSE_INCLUDED = "LicenseIncluded" + +class ArcSqlServerLicenseType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """SQL Server license type. + """ + + PAID = "Paid" + FREE = "Free" + HADR = "HADR" + UNDEFINED = "Undefined" + +class ConnectionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The cloud connectivity status. + """ + + CONNECTED = "Connected" + DISCONNECTED = "Disconnected" + UNKNOWN = "Unknown" + +class DefenderStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Status of Azure Defender. + """ + + PROTECTED = "Protected" + UNPROTECTED = "Unprotected" + UNKNOWN = "Unknown" + +class EditionType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """SQL Server edition. + """ + + EVALUATION = "Evaluation" + ENTERPRISE = "Enterprise" + STANDARD = "Standard" + WEB = "Web" + DEVELOPER = "Developer" + EXPRESS = "Express" + +class ExtendedLocationTypes(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The type of extendedLocation. + """ + + CUSTOM_LOCATION = "CustomLocation" + +class IdentityType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The type of identity that creates/modifies resources + """ + + USER = "User" + APPLICATION = "Application" + MANAGED_IDENTITY = "ManagedIdentity" + KEY = "Key" + +class Infrastructure(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The infrastructure the data controller is running on. + """ + + AZURE = "azure" + GCP = "gcp" + AWS = "aws" + ALIBABA = "alibaba" + ONPREMISES = "onpremises" + OTHER = "other" + +class OperationOrigin(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The intended executor of the operation. + """ + + USER = "user" + SYSTEM = "system" + +class SqlManagedInstanceSkuTier(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The pricing tier for the instance. + """ + + GENERAL_PURPOSE = "GeneralPurpose" + BUSINESS_CRITICAL = "BusinessCritical" + +class SqlVersion(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """SQL Server version. + """ + + SQL_SERVER2019 = "SQL Server 2019" + SQL_SERVER2017 = "SQL Server 2017" + SQL_SERVER2016 = "SQL Server 2016" diff --git a/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/models/_models.py b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/models/_models.py new file mode 100644 index 000000000000..1cfa963b8a04 --- /dev/null +++ b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/models/_models.py @@ -0,0 +1,1442 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
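The enums defined above are string-valued and use a case-insensitive metaclass, so members compare equal to their wire-format strings and can be looked up by name regardless of casing. A short illustrative sketch (values taken from the definitions above; the import path assumes the re-exports in `models/__init__.py` shown earlier):

```py
# Illustrative only: str-based enums with a case-insensitive metaclass.
from azure.mgmt.azurearcdata.models import ConnectionStatus, SqlVersion

assert ConnectionStatus.CONNECTED == "Connected"                            # compares to the wire value
assert ConnectionStatus["connected"] is ConnectionStatus.CONNECTED          # case-insensitive __getitem__
assert getattr(SqlVersion, "sql_server2019") is SqlVersion.SQL_SERVER2019   # case-insensitive attribute access
print(SqlVersion.SQL_SERVER2019.value)                                      # "SQL Server 2019"
```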
+# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.core.exceptions import HttpResponseError +import msrest.serialization + + +class BasicLoginInformation(msrest.serialization.Model): + """Username and password for basic login authentication. + + :param username: Login username. + :type username: str + :param password: Login password. + :type password: str + """ + + _attribute_map = { + 'username': {'key': 'username', 'type': 'str'}, + 'password': {'key': 'password', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(BasicLoginInformation, self).__init__(**kwargs) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + + +class CommonSku(msrest.serialization.Model): + """The resource model definition representing SKU for ARM resources. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. The name of the SKU. It is typically a letter+number code. + :type name: str + :param dev: Whether dev/test is enabled. When the dev field is set to true, the resource is + used for dev/test purpose. + :type dev: bool + :param size: The SKU size. When the name field is the combination of tier and some other value, + this would be the standalone code. + :type size: str + :param family: If the service has different generations of hardware, for the same SKU, then + that can be captured here. + :type family: str + :param capacity: If the SKU supports scale out/in then the capacity integer should be included. + If scale out/in is not possible for the resource this may be omitted. + :type capacity: int + """ + + _validation = { + 'name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'dev': {'key': 'dev', 'type': 'bool'}, + 'size': {'key': 'size', 'type': 'str'}, + 'family': {'key': 'family', 'type': 'str'}, + 'capacity': {'key': 'capacity', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(CommonSku, self).__init__(**kwargs) + self.name = kwargs['name'] + self.dev = kwargs.get('dev', True) + self.size = kwargs.get('size', None) + self.family = kwargs.get('family', None) + self.capacity = kwargs.get('capacity', None) + + +class DataControllerProperties(msrest.serialization.Model): + """The data controller properties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param infrastructure: The infrastructure the data controller is running on. Possible values + include: "azure", "gcp", "aws", "alibaba", "onpremises", "other". Default value: "other". + :type infrastructure: str or ~azure_arc_data_management_client.models.Infrastructure + :param on_premise_property: Properties from the Kubernetes data controller. + :type on_premise_property: ~azure_arc_data_management_client.models.OnPremiseProperty + :param k8_s_raw: The raw kubernetes information. + :type k8_s_raw: any + :param upload_watermark: Properties on upload watermark. Mostly timestamp for each upload data + type. + :type upload_watermark: ~azure_arc_data_management_client.models.UploadWatermark + :param last_uploaded_date: Last uploaded date from Kubernetes cluster. Defaults to current date + time. 
+ :type last_uploaded_date: ~datetime.datetime + :param basic_login_information: Username and password for basic login authentication. + :type basic_login_information: ~azure_arc_data_management_client.models.BasicLoginInformation + :param log_analytics_workspace_config: Log analytics workspace id and primary key. + :type log_analytics_workspace_config: + ~azure_arc_data_management_client.models.LogAnalyticsWorkspaceConfig + :param upload_service_principal: Service principal for uploading billing, metrics and logs. + :type upload_service_principal: ~azure_arc_data_management_client.models.UploadServicePrincipal + :ivar provisioning_state: + :vartype provisioning_state: str + :param cluster_id: If a CustomLocation is provided, this contains the ARM id of the connected + cluster the custom location belongs to. + :type cluster_id: str + :param extension_id: If a CustomLocation is provided, this contains the ARM id of the extension + the custom location belongs to. + :type extension_id: str + """ + + _validation = { + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'infrastructure': {'key': 'infrastructure', 'type': 'str'}, + 'on_premise_property': {'key': 'onPremiseProperty', 'type': 'OnPremiseProperty'}, + 'k8_s_raw': {'key': 'k8sRaw', 'type': 'object'}, + 'upload_watermark': {'key': 'uploadWatermark', 'type': 'UploadWatermark'}, + 'last_uploaded_date': {'key': 'lastUploadedDate', 'type': 'iso-8601'}, + 'basic_login_information': {'key': 'basicLoginInformation', 'type': 'BasicLoginInformation'}, + 'log_analytics_workspace_config': {'key': 'logAnalyticsWorkspaceConfig', 'type': 'LogAnalyticsWorkspaceConfig'}, + 'upload_service_principal': {'key': 'uploadServicePrincipal', 'type': 'UploadServicePrincipal'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'cluster_id': {'key': 'clusterId', 'type': 'str'}, + 'extension_id': {'key': 'extensionId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DataControllerProperties, self).__init__(**kwargs) + self.infrastructure = kwargs.get('infrastructure', "other") + self.on_premise_property = kwargs.get('on_premise_property', None) + self.k8_s_raw = kwargs.get('k8_s_raw', None) + self.upload_watermark = kwargs.get('upload_watermark', None) + self.last_uploaded_date = kwargs.get('last_uploaded_date', None) + self.basic_login_information = kwargs.get('basic_login_information', None) + self.log_analytics_workspace_config = kwargs.get('log_analytics_workspace_config', None) + self.upload_service_principal = kwargs.get('upload_service_principal', None) + self.provisioning_state = None + self.cluster_id = kwargs.get('cluster_id', None) + self.extension_id = kwargs.get('extension_id', None) + + +class Resource(msrest.serialization.Model): + """Resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. 
+ :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Resource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + + +class TrackedResource(Resource): + """The resource model definition for a ARM tracked top level resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives. + :type location: str + :ivar system_data: Read only system data. + :vartype system_data: ~azure_arc_data_management_client.models.SystemData + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + } + + def __init__( + self, + **kwargs + ): + super(TrackedResource, self).__init__(**kwargs) + self.tags = kwargs.get('tags', None) + self.location = kwargs['location'] + self.system_data = None + + +class DataControllerResource(TrackedResource): + """Data controller resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives. + :type location: str + :ivar system_data: Read only system data. + :vartype system_data: ~azure_arc_data_management_client.models.SystemData + :param extended_location: The extendedLocation of the resource. + :type extended_location: ~azure_arc_data_management_client.models.ExtendedLocation + :param properties: Required. The data controller's properties. 
+ :type properties: ~azure_arc_data_management_client.models.DataControllerProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + 'system_data': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'extended_location': {'key': 'extendedLocation', 'type': 'ExtendedLocation'}, + 'properties': {'key': 'properties', 'type': 'DataControllerProperties'}, + } + + def __init__( + self, + **kwargs + ): + super(DataControllerResource, self).__init__(**kwargs) + self.extended_location = kwargs.get('extended_location', None) + self.properties = kwargs['properties'] + + +class DataControllerUpdate(msrest.serialization.Model): + """Used for updating a data controller resource. + + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + """ + + _attribute_map = { + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__( + self, + **kwargs + ): + super(DataControllerUpdate, self).__init__(**kwargs) + self.tags = kwargs.get('tags', None) + + +class ErrorResponse(msrest.serialization.Model): + """An error response from the Azure Data on Azure Arc service. + + :param error: null. + :type error: ~azure_arc_data_management_client.models.ErrorResponseBody + """ + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorResponseBody'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorResponse, self).__init__(**kwargs) + self.error = kwargs.get('error', None) + + +class ErrorResponseBody(msrest.serialization.Model): + """An error response from the Batch service. + + :param code: An identifier for the error. Codes are invariant and are intended to be consumed + programmatically. + :type code: str + :param message: A message describing the error, intended to be suitable for display in a user + interface. + :type message: str + :param target: The target of the particular error. For example, the name of the property in + error. + :type target: str + :param details: A list of additional details about the error. + :type details: list[~azure_arc_data_management_client.models.ErrorResponseBody] + """ + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[ErrorResponseBody]'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorResponseBody, self).__init__(**kwargs) + self.code = kwargs.get('code', None) + self.message = kwargs.get('message', None) + self.target = kwargs.get('target', None) + self.details = kwargs.get('details', None) + + +class ExtendedLocation(msrest.serialization.Model): + """The complex type of the extended location. + + :param name: The name of the extended location. + :type name: str + :param type: The type of the extended location. Possible values include: "CustomLocation". 
+ :type type: str or ~azure_arc_data_management_client.models.ExtendedLocationTypes + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ExtendedLocation, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.type = kwargs.get('type', None) + + +class Identity(msrest.serialization.Model): + """Identity for the resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar principal_id: The principal ID of resource identity. + :vartype principal_id: str + :ivar tenant_id: The tenant ID of resource. + :vartype tenant_id: str + :param type: The identity type. The only acceptable values to pass in are None and + "SystemAssigned". The default value is None. + :type type: str + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + } + + _attribute_map = { + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Identity, self).__init__(**kwargs) + self.principal_id = None + self.tenant_id = None + self.type = kwargs.get('type', None) + + +class K8SResourceRequirements(msrest.serialization.Model): + """The kubernetes resource limits and requests used to restrict or reserve resource usage. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, any] + :param requests: Requests for a kubernetes resource type (e.g 'cpu', 'memory'). The 'cpu' + request must be less than or equal to 'cpu' limit. Default 'cpu' is 2, minimum is 1. Default + 'memory' is '4Gi', minimum is '2Gi. If sku.tier is GeneralPurpose, maximum 'cpu' is 24 and + maximum 'memory' is '128Gi'. + :type requests: dict[str, str] + :param limits: Limits for a kubernetes resource type (e.g 'cpu', 'memory'). The 'cpu' request + must be less than or equal to 'cpu' limit. Default 'cpu' is 2, minimum is 1. Default 'memory' + is '4Gi', minimum is '2Gi. If sku.tier is GeneralPurpose, maximum 'cpu' is 24 and maximum + 'memory' is '128Gi'. + :type limits: dict[str, str] + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'requests': {'key': 'requests', 'type': '{str}'}, + 'limits': {'key': 'limits', 'type': '{str}'}, + } + + def __init__( + self, + **kwargs + ): + super(K8SResourceRequirements, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.requests = kwargs.get('requests', None) + self.limits = kwargs.get('limits', None) + + +class K8SScheduling(msrest.serialization.Model): + """The kubernetes scheduling information. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, any] + :param default: The kubernetes scheduling options. It describes restrictions used to help + Kubernetes select appropriate nodes to host the database service. 
+ :type default: ~azure_arc_data_management_client.models.K8SSchedulingOptions + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'default': {'key': 'default', 'type': 'K8SSchedulingOptions'}, + } + + def __init__( + self, + **kwargs + ): + super(K8SScheduling, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.default = kwargs.get('default', None) + + +class K8SSchedulingOptions(msrest.serialization.Model): + """The kubernetes scheduling options. It describes restrictions used to help Kubernetes select appropriate nodes to host the database service. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, any] + :param resources: The kubernetes resource limits and requests used to restrict or reserve + resource usage. + :type resources: ~azure_arc_data_management_client.models.K8SResourceRequirements + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'resources': {'key': 'resources', 'type': 'K8SResourceRequirements'}, + } + + def __init__( + self, + **kwargs + ): + super(K8SSchedulingOptions, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.resources = kwargs.get('resources', None) + + +class LogAnalyticsWorkspaceConfig(msrest.serialization.Model): + """Log analytics workspace id and primary key. + + :param workspace_id: Azure Log Analytics workspace ID. + :type workspace_id: str + :param primary_key: Primary key of the workspace. + :type primary_key: str + """ + + _attribute_map = { + 'workspace_id': {'key': 'workspaceId', 'type': 'str'}, + 'primary_key': {'key': 'primaryKey', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(LogAnalyticsWorkspaceConfig, self).__init__(**kwargs) + self.workspace_id = kwargs.get('workspace_id', None) + self.primary_key = kwargs.get('primary_key', None) + + +class ODataError(msrest.serialization.Model): + """Information about an error. + + :param code: A language-independent error name. + :type code: str + :param message: The error message. + :type message: str + :param target: The target of the error (for example, the name of the property in error). + :type target: str + :param details: The error details. + :type details: list[~azure_arc_data_management_client.models.ODataError] + """ + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[ODataError]'}, + } + + def __init__( + self, + **kwargs + ): + super(ODataError, self).__init__(**kwargs) + self.code = kwargs.get('code', None) + self.message = kwargs.get('message', None) + self.target = kwargs.get('target', None) + self.details = kwargs.get('details', None) + + +class OnPremiseProperty(msrest.serialization.Model): + """Properties from the Kubernetes data controller. + + All required parameters must be populated in order to send to Azure. + + :param id: Required. A globally unique ID identifying the associated Kubernetes cluster. + :type id: str + :param public_signing_key: Required. Certificate that contains the Kubernetes cluster public + key used to verify signing. + :type public_signing_key: str + :param signing_certificate_thumbprint: Unique thumbprint returned to customer to verify the + certificate being uploaded. 
+ :type signing_certificate_thumbprint: str + """ + + _validation = { + 'id': {'required': True}, + 'public_signing_key': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'public_signing_key': {'key': 'publicSigningKey', 'type': 'str'}, + 'signing_certificate_thumbprint': {'key': 'signingCertificateThumbprint', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(OnPremiseProperty, self).__init__(**kwargs) + self.id = kwargs['id'] + self.public_signing_key = kwargs['public_signing_key'] + self.signing_certificate_thumbprint = kwargs.get('signing_certificate_thumbprint', None) + + +class Operation(msrest.serialization.Model): + """Azure Data Services on Azure Arc operation definition. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. The name of the operation being performed on this particular object. + :type name: str + :param display: Required. The localized display information for this particular operation / + action. + :type display: ~azure_arc_data_management_client.models.OperationDisplay + :ivar origin: The intended executor of the operation. Possible values include: "user", + "system". + :vartype origin: str or ~azure_arc_data_management_client.models.OperationOrigin + :param is_data_action: Required. Indicates whether the operation is a data action. + :type is_data_action: bool + :ivar properties: Additional descriptions for the operation. + :vartype properties: dict[str, any] + """ + + _validation = { + 'name': {'required': True}, + 'display': {'required': True}, + 'origin': {'readonly': True}, + 'is_data_action': {'required': True}, + 'properties': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display': {'key': 'display', 'type': 'OperationDisplay'}, + 'origin': {'key': 'origin', 'type': 'str'}, + 'is_data_action': {'key': 'isDataAction', 'type': 'bool'}, + 'properties': {'key': 'properties', 'type': '{object}'}, + } + + def __init__( + self, + **kwargs + ): + super(Operation, self).__init__(**kwargs) + self.name = kwargs['name'] + self.display = kwargs['display'] + self.origin = None + self.is_data_action = kwargs['is_data_action'] + self.properties = None + + +class OperationDisplay(msrest.serialization.Model): + """Display metadata associated with the operation. + + All required parameters must be populated in order to send to Azure. + + :param provider: Required. The localized friendly form of the resource provider name. + :type provider: str + :param resource: Required. The localized friendly form of the resource type related to this + action/operation. + :type resource: str + :param operation: Required. The localized friendly name for the operation. + :type operation: str + :param description: Required. The localized friendly description for the operation. 
+ :type description: str + """ + + _validation = { + 'provider': {'required': True}, + 'resource': {'required': True}, + 'operation': {'required': True}, + 'description': {'required': True}, + } + + _attribute_map = { + 'provider': {'key': 'provider', 'type': 'str'}, + 'resource': {'key': 'resource', 'type': 'str'}, + 'operation': {'key': 'operation', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(OperationDisplay, self).__init__(**kwargs) + self.provider = kwargs['provider'] + self.resource = kwargs['resource'] + self.operation = kwargs['operation'] + self.description = kwargs['description'] + + +class OperationListResult(msrest.serialization.Model): + """Result of the request to list Azure Data Services on Azure Arc operations. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: Array of results. + :vartype value: list[~azure_arc_data_management_client.models.Operation] + :ivar next_link: Link to retrieve next page of results. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Operation]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(OperationListResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class PageOfDataControllerResource(msrest.serialization.Model): + """PageOfDataControllerResource. + + :param value: + :type value: list[~azure_arc_data_management_client.models.DataControllerResource] + :param next_link: Link to retrieve next page of results. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[DataControllerResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PageOfDataControllerResource, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.next_link = kwargs.get('next_link', None) + + +class Plan(msrest.serialization.Model): + """Plan for the resource. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. A user defined name of the 3rd Party Artifact that is being procured. + :type name: str + :param publisher: Required. The publisher of the 3rd Party Artifact that is being bought. E.g. + NewRelic. + :type publisher: str + :param product: Required. The 3rd Party artifact that is being procured. E.g. NewRelic. Product + maps to the OfferID specified for the artifact at the time of Data Market onboarding. + :type product: str + :param promotion_code: A publisher provided promotion code as provisioned in Data Market for + the said product/artifact. + :type promotion_code: str + :param version: The version of the desired product/artifact. 
+ :type version: str + """ + + _validation = { + 'name': {'required': True}, + 'publisher': {'required': True}, + 'product': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'publisher': {'key': 'publisher', 'type': 'str'}, + 'product': {'key': 'product', 'type': 'str'}, + 'promotion_code': {'key': 'promotionCode', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Plan, self).__init__(**kwargs) + self.name = kwargs['name'] + self.publisher = kwargs['publisher'] + self.product = kwargs['product'] + self.promotion_code = kwargs.get('promotion_code', None) + self.version = kwargs.get('version', None) + + +class ProxyResource(Resource): + """The resource model definition for a ARM proxy resource. It will have everything other than required location and tags. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ProxyResource, self).__init__(**kwargs) + + +class ResourceSku(msrest.serialization.Model): + """ResourceSku. + + :param capacity: + :type capacity: int + :param family: + :type family: str + :param name: + :type name: str + :param size: + :type size: str + :param tier: + :type tier: str + """ + + _attribute_map = { + 'capacity': {'key': 'capacity', 'type': 'int'}, + 'family': {'key': 'family', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'size': {'key': 'size', 'type': 'str'}, + 'tier': {'key': 'tier', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ResourceSku, self).__init__(**kwargs) + self.capacity = kwargs.get('capacity', None) + self.family = kwargs.get('family', None) + self.name = kwargs.get('name', None) + self.size = kwargs.get('size', None) + self.tier = kwargs.get('tier', None) + + +class SqlManagedInstance(TrackedResource): + """A SqlManagedInstance. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives. + :type location: str + :ivar system_data: Read only system data. + :vartype system_data: ~azure_arc_data_management_client.models.SystemData + :param properties: Required. null. 
+ :type properties: ~azure_arc_data_management_client.models.SqlManagedInstanceProperties + :param extended_location: The extendedLocation of the resource. + :type extended_location: ~azure_arc_data_management_client.models.ExtendedLocation + :param sku: Resource sku. + :type sku: ~azure_arc_data_management_client.models.SqlManagedInstanceSku + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + 'system_data': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'properties': {'key': 'properties', 'type': 'SqlManagedInstanceProperties'}, + 'extended_location': {'key': 'extendedLocation', 'type': 'ExtendedLocation'}, + 'sku': {'key': 'sku', 'type': 'SqlManagedInstanceSku'}, + } + + def __init__( + self, + **kwargs + ): + super(SqlManagedInstance, self).__init__(**kwargs) + self.properties = kwargs['properties'] + self.extended_location = kwargs.get('extended_location', None) + self.sku = kwargs.get('sku', None) + + +class SqlManagedInstanceK8SRaw(msrest.serialization.Model): + """The raw kubernetes information. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, any] + :param spec: The kubernetes spec information. + :type spec: ~azure_arc_data_management_client.models.SqlManagedInstanceK8SSpec + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'spec': {'key': 'spec', 'type': 'SqlManagedInstanceK8SSpec'}, + } + + def __init__( + self, + **kwargs + ): + super(SqlManagedInstanceK8SRaw, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.spec = kwargs.get('spec', None) + + +class SqlManagedInstanceK8SSpec(msrest.serialization.Model): + """The kubernetes spec information. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, any] + :param scheduling: The kubernetes scheduling information. + :type scheduling: ~azure_arc_data_management_client.models.K8SScheduling + :param replicas: This option specifies the number of SQL Managed Instance replicas that will be + deployed in your Kubernetes cluster for high availability purposes. If sku.tier is + BusinessCritical, allowed values are '2' or '3' with default of '3'. If sku.tier is + GeneralPurpose, replicas must be '1'. + :type replicas: int + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'scheduling': {'key': 'scheduling', 'type': 'K8SScheduling'}, + 'replicas': {'key': 'replicas', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(SqlManagedInstanceK8SSpec, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.scheduling = kwargs.get('scheduling', None) + self.replicas = kwargs.get('replicas', None) + + +class SqlManagedInstanceListResult(msrest.serialization.Model): + """A list of SqlManagedInstance. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: Array of results. 
+ :vartype value: list[~azure_arc_data_management_client.models.SqlManagedInstance] + :ivar next_link: Link to retrieve next page of results. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[SqlManagedInstance]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SqlManagedInstanceListResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class SqlManagedInstanceProperties(msrest.serialization.Model): + """Properties of sqlManagedInstance. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param data_controller_id: null. + :type data_controller_id: str + :param admin: The instance admin user. + :type admin: str + :param start_time: The instance start time. + :type start_time: str + :param end_time: The instance end time. + :type end_time: str + :param k8_s_raw: The raw kubernetes information. + :type k8_s_raw: ~azure_arc_data_management_client.models.SqlManagedInstanceK8SRaw + :param basic_login_information: Username and password for basic authentication. + :type basic_login_information: ~azure_arc_data_management_client.models.BasicLoginInformation + :param last_uploaded_date: Last uploaded date from Kubernetes cluster. Defaults to current date + time. + :type last_uploaded_date: ~datetime.datetime + :ivar provisioning_state: + :vartype provisioning_state: str + :param license_type: The license type to apply for this managed instance. Possible values + include: "BasePrice", "LicenseIncluded". Default value: "BasePrice". + :type license_type: str or + ~azure_arc_data_management_client.models.ArcSqlManagedInstanceLicenseType + :param cluster_id: If a CustomLocation is provided, this contains the ARM id of the connected + cluster the custom location belongs to. + :type cluster_id: str + :param extension_id: If a CustomLocation is provided, this contains the ARM id of the extension + the custom location belongs to. 
+ :type extension_id: str + """ + + _validation = { + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'data_controller_id': {'key': 'dataControllerId', 'type': 'str'}, + 'admin': {'key': 'admin', 'type': 'str'}, + 'start_time': {'key': 'startTime', 'type': 'str'}, + 'end_time': {'key': 'endTime', 'type': 'str'}, + 'k8_s_raw': {'key': 'k8sRaw', 'type': 'SqlManagedInstanceK8SRaw'}, + 'basic_login_information': {'key': 'basicLoginInformation', 'type': 'BasicLoginInformation'}, + 'last_uploaded_date': {'key': 'lastUploadedDate', 'type': 'iso-8601'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'license_type': {'key': 'licenseType', 'type': 'str'}, + 'cluster_id': {'key': 'clusterId', 'type': 'str'}, + 'extension_id': {'key': 'extensionId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SqlManagedInstanceProperties, self).__init__(**kwargs) + self.data_controller_id = kwargs.get('data_controller_id', None) + self.admin = kwargs.get('admin', None) + self.start_time = kwargs.get('start_time', None) + self.end_time = kwargs.get('end_time', None) + self.k8_s_raw = kwargs.get('k8_s_raw', None) + self.basic_login_information = kwargs.get('basic_login_information', None) + self.last_uploaded_date = kwargs.get('last_uploaded_date', None) + self.provisioning_state = None + self.license_type = kwargs.get('license_type', "BasePrice") + self.cluster_id = kwargs.get('cluster_id', None) + self.extension_id = kwargs.get('extension_id', None) + + +class SqlManagedInstanceSku(msrest.serialization.Model): + """The resource model definition representing SKU for Azure Managed Instance - Azure Arc. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar name: The name of the SKU. Has constant value: "vCore". + :vartype name: str + :param tier: The pricing tier for the instance. Possible values include: "GeneralPurpose", + "BusinessCritical". Default value: "GeneralPurpose". + :type tier: str or ~azure_arc_data_management_client.models.SqlManagedInstanceSkuTier + :param dev: Whether dev/test is enabled. When the dev field is set to true, the resource is + used for dev/test purpose. + :type dev: bool + :param size: The SKU size. When the name field is the combination of tier and some other value, + this would be the standalone code. + :type size: str + :param family: + :type family: str + :param capacity: + :type capacity: int + """ + + _validation = { + 'name': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'tier': {'key': 'tier', 'type': 'str'}, + 'dev': {'key': 'dev', 'type': 'bool'}, + 'size': {'key': 'size', 'type': 'str'}, + 'family': {'key': 'family', 'type': 'str'}, + 'capacity': {'key': 'capacity', 'type': 'int'}, + } + + name = "vCore" + + def __init__( + self, + **kwargs + ): + super(SqlManagedInstanceSku, self).__init__(**kwargs) + self.tier = kwargs.get('tier', "GeneralPurpose") + self.dev = kwargs.get('dev', True) + self.size = kwargs.get('size', None) + self.family = kwargs.get('family', None) + self.capacity = kwargs.get('capacity', None) + + +class SqlManagedInstanceUpdate(msrest.serialization.Model): + """An update to a SQL Managed Instance. + + :param tags: A set of tags. Resource tags. 
+ :type tags: dict[str, str] + """ + + _attribute_map = { + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__( + self, + **kwargs + ): + super(SqlManagedInstanceUpdate, self).__init__(**kwargs) + self.tags = kwargs.get('tags', None) + + +class SqlServerInstance(TrackedResource): + """A SqlServerInstance. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives. + :type location: str + :ivar system_data: Read only system data. + :vartype system_data: ~azure_arc_data_management_client.models.SystemData + :param properties: null. + :type properties: ~azure_arc_data_management_client.models.SqlServerInstanceProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'properties': {'key': 'properties', 'type': 'SqlServerInstanceProperties'}, + } + + def __init__( + self, + **kwargs + ): + super(SqlServerInstance, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + + +class SqlServerInstanceListResult(msrest.serialization.Model): + """A list of SqlServerInstance. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: Array of results. + :vartype value: list[~azure_arc_data_management_client.models.SqlServerInstance] + :ivar next_link: Link to retrieve next page of results. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[SqlServerInstance]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SqlServerInstanceListResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class SqlServerInstanceProperties(msrest.serialization.Model): + """Properties of SqlServerInstance. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param version: SQL Server version. Possible values include: "SQL Server 2019", "SQL Server + 2017", "SQL Server 2016". + :type version: str or ~azure_arc_data_management_client.models.SqlVersion + :param edition: SQL Server edition. Possible values include: "Evaluation", "Enterprise", + "Standard", "Web", "Developer", "Express". + :type edition: str or ~azure_arc_data_management_client.models.EditionType + :param container_resource_id: Required. 
ARM Resource id of the container resource (Azure Arc
+ for Servers).
+ :type container_resource_id: str
+ :ivar create_time: The time when the resource was created.
+ :vartype create_time: str
+ :param v_core: The number of logical processors used by the SQL Server instance.
+ :type v_core: str
+ :param status: Required. The cloud connectivity status. Possible values include: "Connected",
+ "Disconnected", "Unknown".
+ :type status: str or ~azure_arc_data_management_client.models.ConnectionStatus
+ :param patch_level: SQL Server update level.
+ :type patch_level: str
+ :param collation: SQL Server collation.
+ :type collation: str
+ :param current_version: SQL Server current version.
+ :type current_version: str
+ :param instance_name: SQL Server instance name.
+ :type instance_name: str
+ :param tcp_dynamic_ports: Dynamic TCP ports used by SQL Server.
+ :type tcp_dynamic_ports: str
+ :param tcp_static_ports: Static TCP ports used by SQL Server.
+ :type tcp_static_ports: str
+ :param product_id: SQL Server product ID.
+ :type product_id: str
+ :param license_type: SQL Server license type. Possible values include: "Paid", "Free", "HADR",
+ "Undefined".
+ :type license_type: str or ~azure_arc_data_management_client.models.ArcSqlServerLicenseType
+ :param azure_defender_status_last_updated: Timestamp of last Azure Defender status update.
+ :type azure_defender_status_last_updated: ~datetime.datetime
+ :param azure_defender_status: Status of Azure Defender. Possible values include: "Protected",
+ "Unprotected", "Unknown".
+ :type azure_defender_status: str or ~azure_arc_data_management_client.models.DefenderStatus
+ :ivar provisioning_state:
+ :vartype provisioning_state: str
+ """
+
+ _validation = {
+ 'container_resource_id': {'required': True},
+ 'create_time': {'readonly': True},
+ 'status': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'version': {'key': 'version', 'type': 'str'},
+ 'edition': {'key': 'edition', 'type': 'str'},
+ 'container_resource_id': {'key': 'containerResourceId', 'type': 'str'},
+ 'create_time': {'key': 'createTime', 'type': 'str'},
+ 'v_core': {'key': 'vCore', 'type': 'str'},
+ 'status': {'key': 'status', 'type': 'str'},
+ 'patch_level': {'key': 'patchLevel', 'type': 'str'},
+ 'collation': {'key': 'collation', 'type': 'str'},
+ 'current_version': {'key': 'currentVersion', 'type': 'str'},
+ 'instance_name': {'key': 'instanceName', 'type': 'str'},
+ 'tcp_dynamic_ports': {'key': 'tcpDynamicPorts', 'type': 'str'},
+ 'tcp_static_ports': {'key': 'tcpStaticPorts', 'type': 'str'},
+ 'product_id': {'key': 'productId', 'type': 'str'},
+ 'license_type': {'key': 'licenseType', 'type': 'str'},
+ 'azure_defender_status_last_updated': {'key': 'azureDefenderStatusLastUpdated', 'type': 'iso-8601'},
+ 'azure_defender_status': {'key': 'azureDefenderStatus', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SqlServerInstanceProperties, self).__init__(**kwargs)
+ self.version = kwargs.get('version', None)
+ self.edition = kwargs.get('edition', None)
+ self.container_resource_id = kwargs['container_resource_id']
+ self.create_time = None
+ self.v_core = kwargs.get('v_core', None)
+ self.status = kwargs['status']
+ self.patch_level = kwargs.get('patch_level', None)
+ self.collation = kwargs.get('collation', None)
+ self.current_version = kwargs.get('current_version', None)
+ self.instance_name = kwargs.get('instance_name', None)
+ self.tcp_dynamic_ports = kwargs.get('tcp_dynamic_ports', None)
+ self.tcp_static_ports = kwargs.get('tcp_static_ports', None)
+ self.product_id = kwargs.get('product_id', None)
+ self.license_type = kwargs.get('license_type', None)
+ self.azure_defender_status_last_updated = kwargs.get('azure_defender_status_last_updated', None)
+ self.azure_defender_status = kwargs.get('azure_defender_status', None)
+ self.provisioning_state = None
+
+
+class SqlServerInstanceUpdate(msrest.serialization.Model):
+ """An update to a SQL Server Instance.
+
+ :param tags: A set of tags. Resource tags.
+ :type tags: dict[str, str]
+ """
+
+ _attribute_map = {
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SqlServerInstanceUpdate, self).__init__(**kwargs)
+ self.tags = kwargs.get('tags', None)
+
+
+class SystemData(msrest.serialization.Model):
+ """Read only system data.
+
+ :param created_by: An identifier for the identity that created the resource.
+ :type created_by: str
+ :param created_by_type: The type of identity that created the resource. Possible values
+ include: "User", "Application", "ManagedIdentity", "Key".
+ :type created_by_type: str or ~azure_arc_data_management_client.models.IdentityType
+ :param created_at: The timestamp of resource creation (UTC).
+ :type created_at: ~datetime.datetime
+ :param last_modified_by: An identifier for the identity that last modified the resource.
+ :type last_modified_by: str
+ :param last_modified_by_type: The type of identity that last modified the resource. Possible
+ values include: "User", "Application", "ManagedIdentity", "Key".
+ :type last_modified_by_type: str or ~azure_arc_data_management_client.models.IdentityType
+ :param last_modified_at: The timestamp of resource last modification (UTC).
+ :type last_modified_at: ~datetime.datetime
+ """
+
+ _attribute_map = {
+ 'created_by': {'key': 'createdBy', 'type': 'str'},
+ 'created_by_type': {'key': 'createdByType', 'type': 'str'},
+ 'created_at': {'key': 'createdAt', 'type': 'iso-8601'},
+ 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'},
+ 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'},
+ 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SystemData, self).__init__(**kwargs)
+ self.created_by = kwargs.get('created_by', None)
+ self.created_by_type = kwargs.get('created_by_type', None)
+ self.created_at = kwargs.get('created_at', None)
+ self.last_modified_by = kwargs.get('last_modified_by', None)
+ self.last_modified_by_type = kwargs.get('last_modified_by_type', None)
+ self.last_modified_at = kwargs.get('last_modified_at', None)
+
+
+class UploadServicePrincipal(msrest.serialization.Model):
+ """Service principal for uploading billing, metrics and logs.
+
+ :param client_id: Client ID of the service principal for uploading data.
+ :type client_id: str
+ :param tenant_id: Tenant ID of the service principal.
+ :type tenant_id: str
+ :param authority: Authority for the service principal. Example:
+ https://login.microsoftonline.com/.
+ :type authority: str
+ :param client_secret: Secret of the service principal.
+ :type client_secret: str + """ + + _attribute_map = { + 'client_id': {'key': 'clientId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'authority': {'key': 'authority', 'type': 'str'}, + 'client_secret': {'key': 'clientSecret', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(UploadServicePrincipal, self).__init__(**kwargs) + self.client_id = kwargs.get('client_id', None) + self.tenant_id = kwargs.get('tenant_id', None) + self.authority = kwargs.get('authority', None) + self.client_secret = kwargs.get('client_secret', None) + + +class UploadWatermark(msrest.serialization.Model): + """Properties on upload watermark. Mostly timestamp for each upload data type. + + :param metrics: Last uploaded date for metrics from kubernetes cluster. Defaults to current + date time. + :type metrics: ~datetime.datetime + :param logs: Last uploaded date for logs from kubernetes cluster. Defaults to current date + time. + :type logs: ~datetime.datetime + :param usages: Last uploaded date for usages from kubernetes cluster. Defaults to current date + time. + :type usages: ~datetime.datetime + """ + + _attribute_map = { + 'metrics': {'key': 'metrics', 'type': 'iso-8601'}, + 'logs': {'key': 'logs', 'type': 'iso-8601'}, + 'usages': {'key': 'usages', 'type': 'iso-8601'}, + } + + def __init__( + self, + **kwargs + ): + super(UploadWatermark, self).__init__(**kwargs) + self.metrics = kwargs.get('metrics', None) + self.logs = kwargs.get('logs', None) + self.usages = kwargs.get('usages', None) diff --git a/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/models/_models_py3.py b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/models/_models_py3.py new file mode 100644 index 000000000000..822cf32d7820 --- /dev/null +++ b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/models/_models_py3.py @@ -0,0 +1,1605 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +import datetime +from typing import Any, Dict, List, Optional, Union + +from azure.core.exceptions import HttpResponseError +import msrest.serialization + +from ._azure_arc_data_management_client_enums import * + + +class BasicLoginInformation(msrest.serialization.Model): + """Username and password for basic login authentication. + + :param username: Login username. + :type username: str + :param password: Login password. + :type password: str + """ + + _attribute_map = { + 'username': {'key': 'username', 'type': 'str'}, + 'password': {'key': 'password', 'type': 'str'}, + } + + def __init__( + self, + *, + username: Optional[str] = None, + password: Optional[str] = None, + **kwargs + ): + super(BasicLoginInformation, self).__init__(**kwargs) + self.username = username + self.password = password + + +class CommonSku(msrest.serialization.Model): + """The resource model definition representing SKU for ARM resources. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. The name of the SKU. It is typically a letter+number code. + :type name: str + :param dev: Whether dev/test is enabled. 
When the dev field is set to true, the resource is + used for dev/test purpose. + :type dev: bool + :param size: The SKU size. When the name field is the combination of tier and some other value, + this would be the standalone code. + :type size: str + :param family: If the service has different generations of hardware, for the same SKU, then + that can be captured here. + :type family: str + :param capacity: If the SKU supports scale out/in then the capacity integer should be included. + If scale out/in is not possible for the resource this may be omitted. + :type capacity: int + """ + + _validation = { + 'name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'dev': {'key': 'dev', 'type': 'bool'}, + 'size': {'key': 'size', 'type': 'str'}, + 'family': {'key': 'family', 'type': 'str'}, + 'capacity': {'key': 'capacity', 'type': 'int'}, + } + + def __init__( + self, + *, + name: str, + dev: Optional[bool] = True, + size: Optional[str] = None, + family: Optional[str] = None, + capacity: Optional[int] = None, + **kwargs + ): + super(CommonSku, self).__init__(**kwargs) + self.name = name + self.dev = dev + self.size = size + self.family = family + self.capacity = capacity + + +class DataControllerProperties(msrest.serialization.Model): + """The data controller properties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param infrastructure: The infrastructure the data controller is running on. Possible values + include: "azure", "gcp", "aws", "alibaba", "onpremises", "other". Default value: "other". + :type infrastructure: str or ~azure_arc_data_management_client.models.Infrastructure + :param on_premise_property: Properties from the Kubernetes data controller. + :type on_premise_property: ~azure_arc_data_management_client.models.OnPremiseProperty + :param k8_s_raw: The raw kubernetes information. + :type k8_s_raw: any + :param upload_watermark: Properties on upload watermark. Mostly timestamp for each upload data + type. + :type upload_watermark: ~azure_arc_data_management_client.models.UploadWatermark + :param last_uploaded_date: Last uploaded date from Kubernetes cluster. Defaults to current date + time. + :type last_uploaded_date: ~datetime.datetime + :param basic_login_information: Username and password for basic login authentication. + :type basic_login_information: ~azure_arc_data_management_client.models.BasicLoginInformation + :param log_analytics_workspace_config: Log analytics workspace id and primary key. + :type log_analytics_workspace_config: + ~azure_arc_data_management_client.models.LogAnalyticsWorkspaceConfig + :param upload_service_principal: Service principal for uploading billing, metrics and logs. + :type upload_service_principal: ~azure_arc_data_management_client.models.UploadServicePrincipal + :ivar provisioning_state: + :vartype provisioning_state: str + :param cluster_id: If a CustomLocation is provided, this contains the ARM id of the connected + cluster the custom location belongs to. + :type cluster_id: str + :param extension_id: If a CustomLocation is provided, this contains the ARM id of the extension + the custom location belongs to. 
+ :type extension_id: str + """ + + _validation = { + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'infrastructure': {'key': 'infrastructure', 'type': 'str'}, + 'on_premise_property': {'key': 'onPremiseProperty', 'type': 'OnPremiseProperty'}, + 'k8_s_raw': {'key': 'k8sRaw', 'type': 'object'}, + 'upload_watermark': {'key': 'uploadWatermark', 'type': 'UploadWatermark'}, + 'last_uploaded_date': {'key': 'lastUploadedDate', 'type': 'iso-8601'}, + 'basic_login_information': {'key': 'basicLoginInformation', 'type': 'BasicLoginInformation'}, + 'log_analytics_workspace_config': {'key': 'logAnalyticsWorkspaceConfig', 'type': 'LogAnalyticsWorkspaceConfig'}, + 'upload_service_principal': {'key': 'uploadServicePrincipal', 'type': 'UploadServicePrincipal'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'cluster_id': {'key': 'clusterId', 'type': 'str'}, + 'extension_id': {'key': 'extensionId', 'type': 'str'}, + } + + def __init__( + self, + *, + infrastructure: Optional[Union[str, "Infrastructure"]] = "other", + on_premise_property: Optional["OnPremiseProperty"] = None, + k8_s_raw: Optional[Any] = None, + upload_watermark: Optional["UploadWatermark"] = None, + last_uploaded_date: Optional[datetime.datetime] = None, + basic_login_information: Optional["BasicLoginInformation"] = None, + log_analytics_workspace_config: Optional["LogAnalyticsWorkspaceConfig"] = None, + upload_service_principal: Optional["UploadServicePrincipal"] = None, + cluster_id: Optional[str] = None, + extension_id: Optional[str] = None, + **kwargs + ): + super(DataControllerProperties, self).__init__(**kwargs) + self.infrastructure = infrastructure + self.on_premise_property = on_premise_property + self.k8_s_raw = k8_s_raw + self.upload_watermark = upload_watermark + self.last_uploaded_date = last_uploaded_date + self.basic_login_information = basic_login_information + self.log_analytics_workspace_config = log_analytics_workspace_config + self.upload_service_principal = upload_service_principal + self.provisioning_state = None + self.cluster_id = cluster_id + self.extension_id = extension_id + + +class Resource(msrest.serialization.Model): + """Resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Resource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + + +class TrackedResource(Resource): + """The resource model definition for a ARM tracked top level resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. 
Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives. + :type location: str + :ivar system_data: Read only system data. + :vartype system_data: ~azure_arc_data_management_client.models.SystemData + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + } + + def __init__( + self, + *, + location: str, + tags: Optional[Dict[str, str]] = None, + **kwargs + ): + super(TrackedResource, self).__init__(**kwargs) + self.tags = tags + self.location = location + self.system_data = None + + +class DataControllerResource(TrackedResource): + """Data controller resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives. + :type location: str + :ivar system_data: Read only system data. + :vartype system_data: ~azure_arc_data_management_client.models.SystemData + :param extended_location: The extendedLocation of the resource. + :type extended_location: ~azure_arc_data_management_client.models.ExtendedLocation + :param properties: Required. The data controller's properties. 
+ :type properties: ~azure_arc_data_management_client.models.DataControllerProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + 'system_data': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'extended_location': {'key': 'extendedLocation', 'type': 'ExtendedLocation'}, + 'properties': {'key': 'properties', 'type': 'DataControllerProperties'}, + } + + def __init__( + self, + *, + location: str, + properties: "DataControllerProperties", + tags: Optional[Dict[str, str]] = None, + extended_location: Optional["ExtendedLocation"] = None, + **kwargs + ): + super(DataControllerResource, self).__init__(tags=tags, location=location, **kwargs) + self.extended_location = extended_location + self.properties = properties + + +class DataControllerUpdate(msrest.serialization.Model): + """Used for updating a data controller resource. + + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + """ + + _attribute_map = { + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__( + self, + *, + tags: Optional[Dict[str, str]] = None, + **kwargs + ): + super(DataControllerUpdate, self).__init__(**kwargs) + self.tags = tags + + +class ErrorResponse(msrest.serialization.Model): + """An error response from the Azure Data on Azure Arc service. + + :param error: null. + :type error: ~azure_arc_data_management_client.models.ErrorResponseBody + """ + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorResponseBody'}, + } + + def __init__( + self, + *, + error: Optional["ErrorResponseBody"] = None, + **kwargs + ): + super(ErrorResponse, self).__init__(**kwargs) + self.error = error + + +class ErrorResponseBody(msrest.serialization.Model): + """An error response from the Batch service. + + :param code: An identifier for the error. Codes are invariant and are intended to be consumed + programmatically. + :type code: str + :param message: A message describing the error, intended to be suitable for display in a user + interface. + :type message: str + :param target: The target of the particular error. For example, the name of the property in + error. + :type target: str + :param details: A list of additional details about the error. + :type details: list[~azure_arc_data_management_client.models.ErrorResponseBody] + """ + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[ErrorResponseBody]'}, + } + + def __init__( + self, + *, + code: Optional[str] = None, + message: Optional[str] = None, + target: Optional[str] = None, + details: Optional[List["ErrorResponseBody"]] = None, + **kwargs + ): + super(ErrorResponseBody, self).__init__(**kwargs) + self.code = code + self.message = message + self.target = target + self.details = details + + +class ExtendedLocation(msrest.serialization.Model): + """The complex type of the extended location. + + :param name: The name of the extended location. + :type name: str + :param type: The type of the extended location. Possible values include: "CustomLocation". 
+ :type type: str or ~azure_arc_data_management_client.models.ExtendedLocationTypes + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + type: Optional[Union[str, "ExtendedLocationTypes"]] = None, + **kwargs + ): + super(ExtendedLocation, self).__init__(**kwargs) + self.name = name + self.type = type + + +class Identity(msrest.serialization.Model): + """Identity for the resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar principal_id: The principal ID of resource identity. + :vartype principal_id: str + :ivar tenant_id: The tenant ID of resource. + :vartype tenant_id: str + :param type: The identity type. The only acceptable values to pass in are None and + "SystemAssigned". The default value is None. + :type type: str + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + } + + _attribute_map = { + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + *, + type: Optional[str] = None, + **kwargs + ): + super(Identity, self).__init__(**kwargs) + self.principal_id = None + self.tenant_id = None + self.type = type + + +class K8SResourceRequirements(msrest.serialization.Model): + """The kubernetes resource limits and requests used to restrict or reserve resource usage. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, any] + :param requests: Requests for a kubernetes resource type (e.g 'cpu', 'memory'). The 'cpu' + request must be less than or equal to 'cpu' limit. Default 'cpu' is 2, minimum is 1. Default + 'memory' is '4Gi', minimum is '2Gi. If sku.tier is GeneralPurpose, maximum 'cpu' is 24 and + maximum 'memory' is '128Gi'. + :type requests: dict[str, str] + :param limits: Limits for a kubernetes resource type (e.g 'cpu', 'memory'). The 'cpu' request + must be less than or equal to 'cpu' limit. Default 'cpu' is 2, minimum is 1. Default 'memory' + is '4Gi', minimum is '2Gi. If sku.tier is GeneralPurpose, maximum 'cpu' is 24 and maximum + 'memory' is '128Gi'. + :type limits: dict[str, str] + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'requests': {'key': 'requests', 'type': '{str}'}, + 'limits': {'key': 'limits', 'type': '{str}'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, Any]] = None, + requests: Optional[Dict[str, str]] = None, + limits: Optional[Dict[str, str]] = None, + **kwargs + ): + super(K8SResourceRequirements, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.requests = requests + self.limits = limits + + +class K8SScheduling(msrest.serialization.Model): + """The kubernetes scheduling information. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, any] + :param default: The kubernetes scheduling options. It describes restrictions used to help + Kubernetes select appropriate nodes to host the database service. 
+ :type default: ~azure_arc_data_management_client.models.K8SSchedulingOptions + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'default': {'key': 'default', 'type': 'K8SSchedulingOptions'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, Any]] = None, + default: Optional["K8SSchedulingOptions"] = None, + **kwargs + ): + super(K8SScheduling, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.default = default + + +class K8SSchedulingOptions(msrest.serialization.Model): + """The kubernetes scheduling options. It describes restrictions used to help Kubernetes select appropriate nodes to host the database service. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, any] + :param resources: The kubernetes resource limits and requests used to restrict or reserve + resource usage. + :type resources: ~azure_arc_data_management_client.models.K8SResourceRequirements + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'resources': {'key': 'resources', 'type': 'K8SResourceRequirements'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, Any]] = None, + resources: Optional["K8SResourceRequirements"] = None, + **kwargs + ): + super(K8SSchedulingOptions, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.resources = resources + + +class LogAnalyticsWorkspaceConfig(msrest.serialization.Model): + """Log analytics workspace id and primary key. + + :param workspace_id: Azure Log Analytics workspace ID. + :type workspace_id: str + :param primary_key: Primary key of the workspace. + :type primary_key: str + """ + + _attribute_map = { + 'workspace_id': {'key': 'workspaceId', 'type': 'str'}, + 'primary_key': {'key': 'primaryKey', 'type': 'str'}, + } + + def __init__( + self, + *, + workspace_id: Optional[str] = None, + primary_key: Optional[str] = None, + **kwargs + ): + super(LogAnalyticsWorkspaceConfig, self).__init__(**kwargs) + self.workspace_id = workspace_id + self.primary_key = primary_key + + +class ODataError(msrest.serialization.Model): + """Information about an error. + + :param code: A language-independent error name. + :type code: str + :param message: The error message. + :type message: str + :param target: The target of the error (for example, the name of the property in error). + :type target: str + :param details: The error details. + :type details: list[~azure_arc_data_management_client.models.ODataError] + """ + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[ODataError]'}, + } + + def __init__( + self, + *, + code: Optional[str] = None, + message: Optional[str] = None, + target: Optional[str] = None, + details: Optional[List["ODataError"]] = None, + **kwargs + ): + super(ODataError, self).__init__(**kwargs) + self.code = code + self.message = message + self.target = target + self.details = details + + +class OnPremiseProperty(msrest.serialization.Model): + """Properties from the Kubernetes data controller. + + All required parameters must be populated in order to send to Azure. + + :param id: Required. A globally unique ID identifying the associated Kubernetes cluster. + :type id: str + :param public_signing_key: Required. 
Certificate that contains the Kubernetes cluster public + key used to verify signing. + :type public_signing_key: str + :param signing_certificate_thumbprint: Unique thumbprint returned to customer to verify the + certificate being uploaded. + :type signing_certificate_thumbprint: str + """ + + _validation = { + 'id': {'required': True}, + 'public_signing_key': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'public_signing_key': {'key': 'publicSigningKey', 'type': 'str'}, + 'signing_certificate_thumbprint': {'key': 'signingCertificateThumbprint', 'type': 'str'}, + } + + def __init__( + self, + *, + id: str, + public_signing_key: str, + signing_certificate_thumbprint: Optional[str] = None, + **kwargs + ): + super(OnPremiseProperty, self).__init__(**kwargs) + self.id = id + self.public_signing_key = public_signing_key + self.signing_certificate_thumbprint = signing_certificate_thumbprint + + +class Operation(msrest.serialization.Model): + """Azure Data Services on Azure Arc operation definition. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. The name of the operation being performed on this particular object. + :type name: str + :param display: Required. The localized display information for this particular operation / + action. + :type display: ~azure_arc_data_management_client.models.OperationDisplay + :ivar origin: The intended executor of the operation. Possible values include: "user", + "system". + :vartype origin: str or ~azure_arc_data_management_client.models.OperationOrigin + :param is_data_action: Required. Indicates whether the operation is a data action. + :type is_data_action: bool + :ivar properties: Additional descriptions for the operation. + :vartype properties: dict[str, any] + """ + + _validation = { + 'name': {'required': True}, + 'display': {'required': True}, + 'origin': {'readonly': True}, + 'is_data_action': {'required': True}, + 'properties': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display': {'key': 'display', 'type': 'OperationDisplay'}, + 'origin': {'key': 'origin', 'type': 'str'}, + 'is_data_action': {'key': 'isDataAction', 'type': 'bool'}, + 'properties': {'key': 'properties', 'type': '{object}'}, + } + + def __init__( + self, + *, + name: str, + display: "OperationDisplay", + is_data_action: bool, + **kwargs + ): + super(Operation, self).__init__(**kwargs) + self.name = name + self.display = display + self.origin = None + self.is_data_action = is_data_action + self.properties = None + + +class OperationDisplay(msrest.serialization.Model): + """Display metadata associated with the operation. + + All required parameters must be populated in order to send to Azure. + + :param provider: Required. The localized friendly form of the resource provider name. + :type provider: str + :param resource: Required. The localized friendly form of the resource type related to this + action/operation. + :type resource: str + :param operation: Required. The localized friendly name for the operation. + :type operation: str + :param description: Required. The localized friendly description for the operation. 
+ :type description: str + """ + + _validation = { + 'provider': {'required': True}, + 'resource': {'required': True}, + 'operation': {'required': True}, + 'description': {'required': True}, + } + + _attribute_map = { + 'provider': {'key': 'provider', 'type': 'str'}, + 'resource': {'key': 'resource', 'type': 'str'}, + 'operation': {'key': 'operation', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__( + self, + *, + provider: str, + resource: str, + operation: str, + description: str, + **kwargs + ): + super(OperationDisplay, self).__init__(**kwargs) + self.provider = provider + self.resource = resource + self.operation = operation + self.description = description + + +class OperationListResult(msrest.serialization.Model): + """Result of the request to list Azure Data Services on Azure Arc operations. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: Array of results. + :vartype value: list[~azure_arc_data_management_client.models.Operation] + :ivar next_link: Link to retrieve next page of results. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Operation]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(OperationListResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class PageOfDataControllerResource(msrest.serialization.Model): + """PageOfDataControllerResource. + + :param value: + :type value: list[~azure_arc_data_management_client.models.DataControllerResource] + :param next_link: Link to retrieve next page of results. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[DataControllerResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: Optional[List["DataControllerResource"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + super(PageOfDataControllerResource, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class Plan(msrest.serialization.Model): + """Plan for the resource. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. A user defined name of the 3rd Party Artifact that is being procured. + :type name: str + :param publisher: Required. The publisher of the 3rd Party Artifact that is being bought. E.g. + NewRelic. + :type publisher: str + :param product: Required. The 3rd Party artifact that is being procured. E.g. NewRelic. Product + maps to the OfferID specified for the artifact at the time of Data Market onboarding. + :type product: str + :param promotion_code: A publisher provided promotion code as provisioned in Data Market for + the said product/artifact. + :type promotion_code: str + :param version: The version of the desired product/artifact. 
+ :type version: str + """ + + _validation = { + 'name': {'required': True}, + 'publisher': {'required': True}, + 'product': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'publisher': {'key': 'publisher', 'type': 'str'}, + 'product': {'key': 'product', 'type': 'str'}, + 'promotion_code': {'key': 'promotionCode', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + } + + def __init__( + self, + *, + name: str, + publisher: str, + product: str, + promotion_code: Optional[str] = None, + version: Optional[str] = None, + **kwargs + ): + super(Plan, self).__init__(**kwargs) + self.name = name + self.publisher = publisher + self.product = product + self.promotion_code = promotion_code + self.version = version + + +class ProxyResource(Resource): + """The resource model definition for a ARM proxy resource. It will have everything other than required location and tags. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ProxyResource, self).__init__(**kwargs) + + +class ResourceSku(msrest.serialization.Model): + """ResourceSku. + + :param capacity: + :type capacity: int + :param family: + :type family: str + :param name: + :type name: str + :param size: + :type size: str + :param tier: + :type tier: str + """ + + _attribute_map = { + 'capacity': {'key': 'capacity', 'type': 'int'}, + 'family': {'key': 'family', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'size': {'key': 'size', 'type': 'str'}, + 'tier': {'key': 'tier', 'type': 'str'}, + } + + def __init__( + self, + *, + capacity: Optional[int] = None, + family: Optional[str] = None, + name: Optional[str] = None, + size: Optional[str] = None, + tier: Optional[str] = None, + **kwargs + ): + super(ResourceSku, self).__init__(**kwargs) + self.capacity = capacity + self.family = family + self.name = name + self.size = size + self.tier = tier + + +class SqlManagedInstance(TrackedResource): + """A SqlManagedInstance. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives. + :type location: str + :ivar system_data: Read only system data. 
+ :vartype system_data: ~azure_arc_data_management_client.models.SystemData + :param properties: Required. null. + :type properties: ~azure_arc_data_management_client.models.SqlManagedInstanceProperties + :param extended_location: The extendedLocation of the resource. + :type extended_location: ~azure_arc_data_management_client.models.ExtendedLocation + :param sku: Resource sku. + :type sku: ~azure_arc_data_management_client.models.SqlManagedInstanceSku + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + 'system_data': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'properties': {'key': 'properties', 'type': 'SqlManagedInstanceProperties'}, + 'extended_location': {'key': 'extendedLocation', 'type': 'ExtendedLocation'}, + 'sku': {'key': 'sku', 'type': 'SqlManagedInstanceSku'}, + } + + def __init__( + self, + *, + location: str, + properties: "SqlManagedInstanceProperties", + tags: Optional[Dict[str, str]] = None, + extended_location: Optional["ExtendedLocation"] = None, + sku: Optional["SqlManagedInstanceSku"] = None, + **kwargs + ): + super(SqlManagedInstance, self).__init__(tags=tags, location=location, **kwargs) + self.properties = properties + self.extended_location = extended_location + self.sku = sku + + +class SqlManagedInstanceK8SRaw(msrest.serialization.Model): + """The raw kubernetes information. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, any] + :param spec: The kubernetes spec information. + :type spec: ~azure_arc_data_management_client.models.SqlManagedInstanceK8SSpec + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'spec': {'key': 'spec', 'type': 'SqlManagedInstanceK8SSpec'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, Any]] = None, + spec: Optional["SqlManagedInstanceK8SSpec"] = None, + **kwargs + ): + super(SqlManagedInstanceK8SRaw, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.spec = spec + + +class SqlManagedInstanceK8SSpec(msrest.serialization.Model): + """The kubernetes spec information. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, any] + :param scheduling: The kubernetes scheduling information. + :type scheduling: ~azure_arc_data_management_client.models.K8SScheduling + :param replicas: This option specifies the number of SQL Managed Instance replicas that will be + deployed in your Kubernetes cluster for high availability purposes. If sku.tier is + BusinessCritical, allowed values are '2' or '3' with default of '3'. If sku.tier is + GeneralPurpose, replicas must be '1'. 
+ :type replicas: int + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'scheduling': {'key': 'scheduling', 'type': 'K8SScheduling'}, + 'replicas': {'key': 'replicas', 'type': 'int'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, Any]] = None, + scheduling: Optional["K8SScheduling"] = None, + replicas: Optional[int] = None, + **kwargs + ): + super(SqlManagedInstanceK8SSpec, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.scheduling = scheduling + self.replicas = replicas + + +class SqlManagedInstanceListResult(msrest.serialization.Model): + """A list of SqlManagedInstance. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: Array of results. + :vartype value: list[~azure_arc_data_management_client.models.SqlManagedInstance] + :ivar next_link: Link to retrieve next page of results. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[SqlManagedInstance]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SqlManagedInstanceListResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class SqlManagedInstanceProperties(msrest.serialization.Model): + """Properties of sqlManagedInstance. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param data_controller_id: null. + :type data_controller_id: str + :param admin: The instance admin user. + :type admin: str + :param start_time: The instance start time. + :type start_time: str + :param end_time: The instance end time. + :type end_time: str + :param k8_s_raw: The raw kubernetes information. + :type k8_s_raw: ~azure_arc_data_management_client.models.SqlManagedInstanceK8SRaw + :param basic_login_information: Username and password for basic authentication. + :type basic_login_information: ~azure_arc_data_management_client.models.BasicLoginInformation + :param last_uploaded_date: Last uploaded date from Kubernetes cluster. Defaults to current date + time. + :type last_uploaded_date: ~datetime.datetime + :ivar provisioning_state: + :vartype provisioning_state: str + :param license_type: The license type to apply for this managed instance. Possible values + include: "BasePrice", "LicenseIncluded". Default value: "BasePrice". + :type license_type: str or + ~azure_arc_data_management_client.models.ArcSqlManagedInstanceLicenseType + :param cluster_id: If a CustomLocation is provided, this contains the ARM id of the connected + cluster the custom location belongs to. + :type cluster_id: str + :param extension_id: If a CustomLocation is provided, this contains the ARM id of the extension + the custom location belongs to. 
+ :type extension_id: str + """ + + _validation = { + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'data_controller_id': {'key': 'dataControllerId', 'type': 'str'}, + 'admin': {'key': 'admin', 'type': 'str'}, + 'start_time': {'key': 'startTime', 'type': 'str'}, + 'end_time': {'key': 'endTime', 'type': 'str'}, + 'k8_s_raw': {'key': 'k8sRaw', 'type': 'SqlManagedInstanceK8SRaw'}, + 'basic_login_information': {'key': 'basicLoginInformation', 'type': 'BasicLoginInformation'}, + 'last_uploaded_date': {'key': 'lastUploadedDate', 'type': 'iso-8601'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'license_type': {'key': 'licenseType', 'type': 'str'}, + 'cluster_id': {'key': 'clusterId', 'type': 'str'}, + 'extension_id': {'key': 'extensionId', 'type': 'str'}, + } + + def __init__( + self, + *, + data_controller_id: Optional[str] = None, + admin: Optional[str] = None, + start_time: Optional[str] = None, + end_time: Optional[str] = None, + k8_s_raw: Optional["SqlManagedInstanceK8SRaw"] = None, + basic_login_information: Optional["BasicLoginInformation"] = None, + last_uploaded_date: Optional[datetime.datetime] = None, + license_type: Optional[Union[str, "ArcSqlManagedInstanceLicenseType"]] = "BasePrice", + cluster_id: Optional[str] = None, + extension_id: Optional[str] = None, + **kwargs + ): + super(SqlManagedInstanceProperties, self).__init__(**kwargs) + self.data_controller_id = data_controller_id + self.admin = admin + self.start_time = start_time + self.end_time = end_time + self.k8_s_raw = k8_s_raw + self.basic_login_information = basic_login_information + self.last_uploaded_date = last_uploaded_date + self.provisioning_state = None + self.license_type = license_type + self.cluster_id = cluster_id + self.extension_id = extension_id + + +class SqlManagedInstanceSku(msrest.serialization.Model): + """The resource model definition representing SKU for Azure Managed Instance - Azure Arc. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar name: The name of the SKU. Has constant value: "vCore". + :vartype name: str + :param tier: The pricing tier for the instance. Possible values include: "GeneralPurpose", + "BusinessCritical". Default value: "GeneralPurpose". + :type tier: str or ~azure_arc_data_management_client.models.SqlManagedInstanceSkuTier + :param dev: Whether dev/test is enabled. When the dev field is set to true, the resource is + used for dev/test purpose. + :type dev: bool + :param size: The SKU size. When the name field is the combination of tier and some other value, + this would be the standalone code. 
+ :type size: str + :param family: + :type family: str + :param capacity: + :type capacity: int + """ + + _validation = { + 'name': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'tier': {'key': 'tier', 'type': 'str'}, + 'dev': {'key': 'dev', 'type': 'bool'}, + 'size': {'key': 'size', 'type': 'str'}, + 'family': {'key': 'family', 'type': 'str'}, + 'capacity': {'key': 'capacity', 'type': 'int'}, + } + + name = "vCore" + + def __init__( + self, + *, + tier: Optional[Union[str, "SqlManagedInstanceSkuTier"]] = "GeneralPurpose", + dev: Optional[bool] = True, + size: Optional[str] = None, + family: Optional[str] = None, + capacity: Optional[int] = None, + **kwargs + ): + super(SqlManagedInstanceSku, self).__init__(**kwargs) + self.tier = tier + self.dev = dev + self.size = size + self.family = family + self.capacity = capacity + + +class SqlManagedInstanceUpdate(msrest.serialization.Model): + """An update to a SQL Managed Instance. + + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + """ + + _attribute_map = { + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__( + self, + *, + tags: Optional[Dict[str, str]] = None, + **kwargs + ): + super(SqlManagedInstanceUpdate, self).__init__(**kwargs) + self.tags = tags + + +class SqlServerInstance(TrackedResource): + """A SqlServerInstance. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives. + :type location: str + :ivar system_data: Read only system data. + :vartype system_data: ~azure_arc_data_management_client.models.SystemData + :param properties: null. + :type properties: ~azure_arc_data_management_client.models.SqlServerInstanceProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'properties': {'key': 'properties', 'type': 'SqlServerInstanceProperties'}, + } + + def __init__( + self, + *, + location: str, + tags: Optional[Dict[str, str]] = None, + properties: Optional["SqlServerInstanceProperties"] = None, + **kwargs + ): + super(SqlServerInstance, self).__init__(tags=tags, location=location, **kwargs) + self.properties = properties + + +class SqlServerInstanceListResult(msrest.serialization.Model): + """A list of SqlServerInstance. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: Array of results. 
+ :vartype value: list[~azure_arc_data_management_client.models.SqlServerInstance] + :ivar next_link: Link to retrieve next page of results. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[SqlServerInstance]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SqlServerInstanceListResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class SqlServerInstanceProperties(msrest.serialization.Model): + """Properties of SqlServerInstance. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param version: SQL Server version. Possible values include: "SQL Server 2019", "SQL Server + 2017", "SQL Server 2016". + :type version: str or ~azure_arc_data_management_client.models.SqlVersion + :param edition: SQL Server edition. Possible values include: "Evaluation", "Enterprise", + "Standard", "Web", "Developer", "Express". + :type edition: str or ~azure_arc_data_management_client.models.EditionType + :param container_resource_id: Required. ARM Resource id of the container resource (Azure Arc + for Servers). + :type container_resource_id: str + :ivar create_time: The time when the resource was created. + :vartype create_time: str + :param v_core: The number of logical processors used by the SQL Server instance. + :type v_core: str + :param status: Required. The cloud connectivity status. Possible values include: "Connected", + "Disconnected", "Unknown". + :type status: str or ~azure_arc_data_management_client.models.ConnectionStatus + :param patch_level: SQL Server update level. + :type patch_level: str + :param collation: SQL Server collation. + :type collation: str + :param current_version: SQL Server current version. + :type current_version: str + :param instance_name: SQL Server instance name. + :type instance_name: str + :param tcp_dynamic_ports: Dynamic TCP ports used by SQL Server. + :type tcp_dynamic_ports: str + :param tcp_static_ports: Static TCP ports used by SQL Server. + :type tcp_static_ports: str + :param product_id: SQL Server product ID. + :type product_id: str + :param license_type: SQL Server license type. Possible values include: "Paid", "Free", "HADR", + "Undefined". + :type license_type: str or ~azure_arc_data_management_client.models.ArcSqlServerLicenseType + :param azure_defender_status_last_updated: Timestamp of last Azure Defender status update. + :type azure_defender_status_last_updated: ~datetime.datetime + :param azure_defender_status: Status of Azure Defender. Possible values include: "Protected", + "Unprotected", "Unknown". 
+ :type azure_defender_status: str or ~azure_arc_data_management_client.models.DefenderStatus + :ivar provisioning_state: + :vartype provisioning_state: str + """ + + _validation = { + 'container_resource_id': {'required': True}, + 'create_time': {'readonly': True}, + 'status': {'required': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'version': {'key': 'version', 'type': 'str'}, + 'edition': {'key': 'edition', 'type': 'str'}, + 'container_resource_id': {'key': 'containerResourceId', 'type': 'str'}, + 'create_time': {'key': 'createTime', 'type': 'str'}, + 'v_core': {'key': 'vCore', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'patch_level': {'key': 'patchLevel', 'type': 'str'}, + 'collation': {'key': 'collation', 'type': 'str'}, + 'current_version': {'key': 'currentVersion', 'type': 'str'}, + 'instance_name': {'key': 'instanceName', 'type': 'str'}, + 'tcp_dynamic_ports': {'key': 'tcpDynamicPorts', 'type': 'str'}, + 'tcp_static_ports': {'key': 'tcpStaticPorts', 'type': 'str'}, + 'product_id': {'key': 'productId', 'type': 'str'}, + 'license_type': {'key': 'licenseType', 'type': 'str'}, + 'azure_defender_status_last_updated': {'key': 'azureDefenderStatusLastUpdated', 'type': 'iso-8601'}, + 'azure_defender_status': {'key': 'azureDefenderStatus', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + } + + def __init__( + self, + *, + container_resource_id: str, + status: Union[str, "ConnectionStatus"], + version: Optional[Union[str, "SqlVersion"]] = None, + edition: Optional[Union[str, "EditionType"]] = None, + v_core: Optional[str] = None, + patch_level: Optional[str] = None, + collation: Optional[str] = None, + current_version: Optional[str] = None, + instance_name: Optional[str] = None, + tcp_dynamic_ports: Optional[str] = None, + tcp_static_ports: Optional[str] = None, + product_id: Optional[str] = None, + license_type: Optional[Union[str, "ArcSqlServerLicenseType"]] = None, + azure_defender_status_last_updated: Optional[datetime.datetime] = None, + azure_defender_status: Optional[Union[str, "DefenderStatus"]] = None, + **kwargs + ): + super(SqlServerInstanceProperties, self).__init__(**kwargs) + self.version = version + self.edition = edition + self.container_resource_id = container_resource_id + self.create_time = None + self.v_core = v_core + self.status = status + self.patch_level = patch_level + self.collation = collation + self.current_version = current_version + self.instance_name = instance_name + self.tcp_dynamic_ports = tcp_dynamic_ports + self.tcp_static_ports = tcp_static_ports + self.product_id = product_id + self.license_type = license_type + self.azure_defender_status_last_updated = azure_defender_status_last_updated + self.azure_defender_status = azure_defender_status + self.provisioning_state = None + + +class SqlServerInstanceUpdate(msrest.serialization.Model): + """An update to a SQL Server Instance. + + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + """ + + _attribute_map = { + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__( + self, + *, + tags: Optional[Dict[str, str]] = None, + **kwargs + ): + super(SqlServerInstanceUpdate, self).__init__(**kwargs) + self.tags = tags + + +class SystemData(msrest.serialization.Model): + """Read only system data. + + :param created_by: An identifier for the identity that created the resource. + :type created_by: str + :param created_by_type: The type of identity that created the resource. 
Possible values + include: "User", "Application", "ManagedIdentity", "Key". + :type created_by_type: str or ~azure_arc_data_management_client.models.IdentityType + :param created_at: The timestamp of resource creation (UTC). + :type created_at: ~datetime.datetime + :param last_modified_by: An identifier for the identity that last modified the resource. + :type last_modified_by: str + :param last_modified_by_type: The type of identity that last modified the resource. Possible + values include: "User", "Application", "ManagedIdentity", "Key". + :type last_modified_by_type: str or ~azure_arc_data_management_client.models.IdentityType + :param last_modified_at: The timestamp of resource last modification (UTC). + :type last_modified_at: ~datetime.datetime + """ + + _attribute_map = { + 'created_by': {'key': 'createdBy', 'type': 'str'}, + 'created_by_type': {'key': 'createdByType', 'type': 'str'}, + 'created_at': {'key': 'createdAt', 'type': 'iso-8601'}, + 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'}, + 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'}, + 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'}, + } + + def __init__( + self, + *, + created_by: Optional[str] = None, + created_by_type: Optional[Union[str, "IdentityType"]] = None, + created_at: Optional[datetime.datetime] = None, + last_modified_by: Optional[str] = None, + last_modified_by_type: Optional[Union[str, "IdentityType"]] = None, + last_modified_at: Optional[datetime.datetime] = None, + **kwargs + ): + super(SystemData, self).__init__(**kwargs) + self.created_by = created_by + self.created_by_type = created_by_type + self.created_at = created_at + self.last_modified_by = last_modified_by + self.last_modified_by_type = last_modified_by_type + self.last_modified_at = last_modified_at + + +class UploadServicePrincipal(msrest.serialization.Model): + """Service principal for uploading billing, metrics and logs. + + :param client_id: Client ID of the service principal for uploading data. + :type client_id: str + :param tenant_id: Tenant ID of the service principal. + :type tenant_id: str + :param authority: Authority for the service principal. Example: + https://login.microsoftonline.com/. + :type authority: str + :param client_secret: Secret of the service principal. + :type client_secret: str + """ + + _attribute_map = { + 'client_id': {'key': 'clientId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'authority': {'key': 'authority', 'type': 'str'}, + 'client_secret': {'key': 'clientSecret', 'type': 'str'}, + } + + def __init__( + self, + *, + client_id: Optional[str] = None, + tenant_id: Optional[str] = None, + authority: Optional[str] = None, + client_secret: Optional[str] = None, + **kwargs + ): + super(UploadServicePrincipal, self).__init__(**kwargs) + self.client_id = client_id + self.tenant_id = tenant_id + self.authority = authority + self.client_secret = client_secret + + +class UploadWatermark(msrest.serialization.Model): + """Properties on upload watermark. Mostly timestamp for each upload data type. + + :param metrics: Last uploaded date for metrics from kubernetes cluster. Defaults to current + date time. + :type metrics: ~datetime.datetime + :param logs: Last uploaded date for logs from kubernetes cluster. Defaults to current date + time. + :type logs: ~datetime.datetime + :param usages: Last uploaded date for usages from kubernetes cluster. Defaults to current date + time. 
+ :type usages: ~datetime.datetime + """ + + _attribute_map = { + 'metrics': {'key': 'metrics', 'type': 'iso-8601'}, + 'logs': {'key': 'logs', 'type': 'iso-8601'}, + 'usages': {'key': 'usages', 'type': 'iso-8601'}, + } + + def __init__( + self, + *, + metrics: Optional[datetime.datetime] = None, + logs: Optional[datetime.datetime] = None, + usages: Optional[datetime.datetime] = None, + **kwargs + ): + super(UploadWatermark, self).__init__(**kwargs) + self.metrics = metrics + self.logs = logs + self.usages = usages diff --git a/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/operations/__init__.py b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/operations/__init__.py new file mode 100644 index 000000000000..71f1823a000e --- /dev/null +++ b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/operations/__init__.py @@ -0,0 +1,19 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._operations import Operations +from ._sql_managed_instances_operations import SqlManagedInstancesOperations +from ._sql_server_instances_operations import SqlServerInstancesOperations +from ._data_controllers_operations import DataControllersOperations + +__all__ = [ + 'Operations', + 'SqlManagedInstancesOperations', + 'SqlServerInstancesOperations', + 'DataControllersOperations', +] diff --git a/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/operations/_data_controllers_operations.py b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/operations/_data_controllers_operations.py new file mode 100644 index 000000000000..c0afcbc34fac --- /dev/null +++ b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/operations/_data_controllers_operations.py @@ -0,0 +1,562 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. 
import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class DataControllersOperations(object): + """DataControllersOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_arc_data_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_in_subscription( + self, + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.PageOfDataControllerResource"] + """List dataController resources in the subscription. + + List dataController resources in the subscription. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either PageOfDataControllerResource or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure_arc_data_management_client.models.PageOfDataControllerResource] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PageOfDataControllerResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_in_subscription.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('PageOfDataControllerResource', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, 
response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_in_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.AzureArcData/dataControllers'} # type: ignore + + def list_in_group( + self, + resource_group_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.PageOfDataControllerResource"] + """List dataController resources in the resource group. + + List dataController resources in the resource group. + + :param resource_group_name: The name of the Azure resource group. + :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either PageOfDataControllerResource or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure_arc_data_management_client.models.PageOfDataControllerResource] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PageOfDataControllerResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_in_group.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('PageOfDataControllerResource', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_in_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/dataControllers'} # type: ignore + + def _put_data_controller_initial( + self, + resource_group_name, # type: str + data_controller_name, # type: str + data_controller_resource, # type: 
"_models.DataControllerResource" + **kwargs # type: Any + ): + # type: (...) -> "_models.DataControllerResource" + cls = kwargs.pop('cls', None) # type: ClsType["_models.DataControllerResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._put_data_controller_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'dataControllerName': self._serialize.url("data_controller_name", data_controller_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(data_controller_resource, 'DataControllerResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('DataControllerResource', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('DataControllerResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _put_data_controller_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/dataControllers/{dataControllerName}'} # type: ignore + + def begin_put_data_controller( + self, + resource_group_name, # type: str + data_controller_name, # type: str + data_controller_resource, # type: "_models.DataControllerResource" + **kwargs # type: Any + ): + # type: (...) -> LROPoller["_models.DataControllerResource"] + """Creates or replaces a dataController resource. + + :param resource_group_name: The name of the Azure resource group. + :type resource_group_name: str + :param data_controller_name: + :type data_controller_name: str + :param data_controller_resource: desc. + :type data_controller_resource: ~azure_arc_data_management_client.models.DataControllerResource + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. 
+ Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns either DataControllerResource or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure_arc_data_management_client.models.DataControllerResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.DataControllerResource"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._put_data_controller_initial( + resource_group_name=resource_group_name, + data_controller_name=data_controller_name, + data_controller_resource=data_controller_resource, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('DataControllerResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'dataControllerName': self._serialize.url("data_controller_name", data_controller_name, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_put_data_controller.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/dataControllers/{dataControllerName}'} # type: ignore + + def _delete_data_controller_initial( + self, + resource_group_name, # type: str + data_controller_name, # type: str + **kwargs # type: Any + ): + # type: (...) 
-> None + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + accept = "application/json" + + # Construct URL + url = self._delete_data_controller_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'dataControllerName': self._serialize.url("data_controller_name", data_controller_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_data_controller_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/dataControllers/{dataControllerName}'} # type: ignore + + def begin_delete_data_controller( + self, + resource_group_name, # type: str + data_controller_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Deletes a dataController resource. + + :param resource_group_name: The name of the Azure resource group. + :type resource_group_name: str + :param data_controller_name: + :type data_controller_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_data_controller_initial( + resource_group_name=resource_group_name, + data_controller_name=data_controller_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'dataControllerName': self._serialize.url("data_controller_name", data_controller_name, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete_data_controller.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/dataControllers/{dataControllerName}'} # type: ignore + + def get_data_controller( + self, + resource_group_name, # type: str + data_controller_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.DataControllerResource" + """Retrieves a dataController resource. + + :param resource_group_name: The name of the Azure resource group. 
+ :type resource_group_name: str + :param data_controller_name: + :type data_controller_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataControllerResource, or the result of cls(response) + :rtype: ~azure_arc_data_management_client.models.DataControllerResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.DataControllerResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + accept = "application/json" + + # Construct URL + url = self.get_data_controller.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'dataControllerName': self._serialize.url("data_controller_name", data_controller_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('DataControllerResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_data_controller.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/dataControllers/{dataControllerName}'} # type: ignore + + def patch_data_controller( + self, + resource_group_name, # type: str + data_controller_name, # type: str + data_controller_resource, # type: "_models.DataControllerUpdate" + **kwargs # type: Any + ): + # type: (...) -> "_models.DataControllerResource" + """Updates a dataController resource. + + :param resource_group_name: The name of the Azure resource group. + :type resource_group_name: str + :param data_controller_name: + :type data_controller_name: str + :param data_controller_resource: The update data controller resource. 
+ :type data_controller_resource: ~azure_arc_data_management_client.models.DataControllerUpdate + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataControllerResource, or the result of cls(response) + :rtype: ~azure_arc_data_management_client.models.DataControllerResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.DataControllerResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.patch_data_controller.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'dataControllerName': self._serialize.url("data_controller_name", data_controller_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(data_controller_resource, 'DataControllerUpdate') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('DataControllerResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + patch_data_controller.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/dataControllers/{dataControllerName}'} # type: ignore diff --git a/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/operations/_operations.py b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/operations/_operations.py new file mode 100644 index 000000000000..94732fb29a25 --- /dev/null +++ b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/operations/_operations.py @@ -0,0 +1,110 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class Operations(object): + """Operations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_arc_data_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.OperationListResult"] + """Lists all of the available Azure Data Services on Azure Arc API operations. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either OperationListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure_arc_data_management_client.models.OperationListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('OperationListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in 
[200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/providers/Microsoft.AzureArcData/operations'} # type: ignore diff --git a/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/operations/_sql_managed_instances_operations.py b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/operations/_sql_managed_instances_operations.py new file mode 100644 index 000000000000..9c06d6b09103 --- /dev/null +++ b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/operations/_sql_managed_instances_operations.py @@ -0,0 +1,562 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class SqlManagedInstancesOperations(object): + """SqlManagedInstancesOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_arc_data_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.SqlManagedInstanceListResult"] + """List sqlManagedInstance resources in the subscription. + + List sqlManagedInstance resources in the subscription. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either SqlManagedInstanceListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure_arc_data_management_client.models.SqlManagedInstanceListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.SqlManagedInstanceListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('SqlManagedInstanceListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.AzureArcData/sqlManagedInstances'} # type: ignore + + def list_by_resource_group( + self, + resource_group_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.SqlManagedInstanceListResult"] + """List sqlManagedInstance resources in the resource group. + + Gets all sqlManagedInstances in a resource group. + + :param resource_group_name: The name of the Azure resource group. 
+ :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either SqlManagedInstanceListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure_arc_data_management_client.models.SqlManagedInstanceListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.SqlManagedInstanceListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_resource_group.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('SqlManagedInstanceListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/sqlManagedInstances'} # type: ignore + + def get( + self, + resource_group_name, # type: str + sql_managed_instance_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.SqlManagedInstance" + """Retrieves a SQL Managed Instance resource. + + :param resource_group_name: The name of the Azure resource group. + :type resource_group_name: str + :param sql_managed_instance_name: Name of SQL Managed Instance. 
+ :type sql_managed_instance_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SqlManagedInstance, or the result of cls(response) + :rtype: ~azure_arc_data_management_client.models.SqlManagedInstance + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.SqlManagedInstance"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'sqlManagedInstanceName': self._serialize.url("sql_managed_instance_name", sql_managed_instance_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('SqlManagedInstance', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/sqlManagedInstances/{sqlManagedInstanceName}'} # type: ignore + + def _create_initial( + self, + resource_group_name, # type: str + sql_managed_instance_name, # type: str + sql_managed_instance, # type: "_models.SqlManagedInstance" + **kwargs # type: Any + ): + # type: (...) 
-> "_models.SqlManagedInstance" + cls = kwargs.pop('cls', None) # type: ClsType["_models.SqlManagedInstance"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._create_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'sqlManagedInstanceName': self._serialize.url("sql_managed_instance_name", sql_managed_instance_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(sql_managed_instance, 'SqlManagedInstance') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('SqlManagedInstance', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('SqlManagedInstance', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/sqlManagedInstances/{sqlManagedInstanceName}'} # type: ignore + + def begin_create( + self, + resource_group_name, # type: str + sql_managed_instance_name, # type: str + sql_managed_instance, # type: "_models.SqlManagedInstance" + **kwargs # type: Any + ): + # type: (...) -> LROPoller["_models.SqlManagedInstance"] + """Creates or replaces a SQL Managed Instance resource. + + :param resource_group_name: The name of the Azure resource group. + :type resource_group_name: str + :param sql_managed_instance_name: The name of SQL Managed Instances. + :type sql_managed_instance_name: str + :param sql_managed_instance: The SQL Managed Instance to be created or updated. + :type sql_managed_instance: ~azure_arc_data_management_client.models.SqlManagedInstance + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. 
+ Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns either SqlManagedInstance or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure_arc_data_management_client.models.SqlManagedInstance] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.SqlManagedInstance"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_initial( + resource_group_name=resource_group_name, + sql_managed_instance_name=sql_managed_instance_name, + sql_managed_instance=sql_managed_instance, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('SqlManagedInstance', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'sqlManagedInstanceName': self._serialize.url("sql_managed_instance_name", sql_managed_instance_name, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/sqlManagedInstances/{sqlManagedInstanceName}'} # type: ignore + + def _delete_initial( + self, + resource_group_name, # type: str + sql_managed_instance_name, # type: str + **kwargs # type: Any + ): + # type: (...) 
-> None + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + accept = "application/json" + + # Construct URL + url = self._delete_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'sqlManagedInstanceName': self._serialize.url("sql_managed_instance_name", sql_managed_instance_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/sqlManagedInstances/{sqlManagedInstanceName}'} # type: ignore + + def begin_delete( + self, + resource_group_name, # type: str + sql_managed_instance_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Deletes a SQL Managed Instance resource. + + :param resource_group_name: The name of the Azure resource group. + :type resource_group_name: str + :param sql_managed_instance_name: The name of Sql Managed Instances. + :type sql_managed_instance_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + sql_managed_instance_name=sql_managed_instance_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'sqlManagedInstanceName': self._serialize.url("sql_managed_instance_name", sql_managed_instance_name, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/sqlManagedInstances/{sqlManagedInstanceName}'} # type: ignore + + def update( + self, + resource_group_name, # type: str + sql_managed_instance_name, # type: str + parameters, # type: "_models.SqlManagedInstanceUpdate" + **kwargs # type: Any + ): + # type: (...) -> "_models.SqlManagedInstance" + """Updates a SQL Managed Instance resource. + + :param resource_group_name: The name of the Azure resource group. + :type resource_group_name: str + :param sql_managed_instance_name: Name of sqlManagedInstance. + :type sql_managed_instance_name: str + :param parameters: The SQL Managed Instance. 
+ :type parameters: ~azure_arc_data_management_client.models.SqlManagedInstanceUpdate + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SqlManagedInstance, or the result of cls(response) + :rtype: ~azure_arc_data_management_client.models.SqlManagedInstance + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.SqlManagedInstance"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'sqlManagedInstanceName': self._serialize.url("sql_managed_instance_name", sql_managed_instance_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'SqlManagedInstanceUpdate') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('SqlManagedInstance', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/sqlManagedInstances/{sqlManagedInstanceName}'} # type: ignore diff --git a/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/operations/_sql_server_instances_operations.py b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/operations/_sql_server_instances_operations.py new file mode 100644 index 000000000000..768ad1eaf582 --- /dev/null +++ b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/operations/_sql_server_instances_operations.py @@ -0,0 +1,562 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class SqlServerInstancesOperations(object): + """SqlServerInstancesOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_arc_data_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.SqlServerInstanceListResult"] + """List sqlServerInstance resources in the subscription. + + List sqlServerInstance resources in the subscription. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either SqlServerInstanceListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure_arc_data_management_client.models.SqlServerInstanceListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.SqlServerInstanceListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('SqlServerInstanceListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.AzureArcData/sqlServerInstances'} # type: ignore + + def list_by_resource_group( + self, + resource_group_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.SqlServerInstanceListResult"] + """List sqlServerInstance resources in the resource group. + + Gets all sqlServerInstances in a resource group. + + :param resource_group_name: The name of the Azure resource group. 
+ :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either SqlServerInstanceListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure_arc_data_management_client.models.SqlServerInstanceListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.SqlServerInstanceListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_resource_group.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('SqlServerInstanceListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/sqlServerInstances'} # type: ignore + + def get( + self, + resource_group_name, # type: str + sql_server_instance_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.SqlServerInstance" + """Retrieves a SQL Server Instance resource. + + :param resource_group_name: The name of the Azure resource group. + :type resource_group_name: str + :param sql_server_instance_name: Name of SQL Server Instance. 
+ :type sql_server_instance_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SqlServerInstance, or the result of cls(response) + :rtype: ~azure_arc_data_management_client.models.SqlServerInstance + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.SqlServerInstance"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'sqlServerInstanceName': self._serialize.url("sql_server_instance_name", sql_server_instance_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('SqlServerInstance', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/sqlServerInstances/{sqlServerInstanceName}'} # type: ignore + + def _create_initial( + self, + resource_group_name, # type: str + sql_server_instance_name, # type: str + sql_server_instance, # type: "_models.SqlServerInstance" + **kwargs # type: Any + ): + # type: (...) 
-> "_models.SqlServerInstance" + cls = kwargs.pop('cls', None) # type: ClsType["_models.SqlServerInstance"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._create_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'sqlServerInstanceName': self._serialize.url("sql_server_instance_name", sql_server_instance_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(sql_server_instance, 'SqlServerInstance') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('SqlServerInstance', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('SqlServerInstance', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/sqlServerInstances/{sqlServerInstanceName}'} # type: ignore + + def begin_create( + self, + resource_group_name, # type: str + sql_server_instance_name, # type: str + sql_server_instance, # type: "_models.SqlServerInstance" + **kwargs # type: Any + ): + # type: (...) -> LROPoller["_models.SqlServerInstance"] + """Creates or replaces a SQL Server Instance resource. + + :param resource_group_name: The name of the Azure resource group. + :type resource_group_name: str + :param sql_server_instance_name: The name of SQL Server Instance. + :type sql_server_instance_name: str + :param sql_server_instance: The SQL Server Instance to be created or updated. + :type sql_server_instance: ~azure_arc_data_management_client.models.SqlServerInstance + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. 
+ Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns either SqlServerInstance or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure_arc_data_management_client.models.SqlServerInstance] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.SqlServerInstance"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_initial( + resource_group_name=resource_group_name, + sql_server_instance_name=sql_server_instance_name, + sql_server_instance=sql_server_instance, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('SqlServerInstance', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'sqlServerInstanceName': self._serialize.url("sql_server_instance_name", sql_server_instance_name, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/sqlServerInstances/{sqlServerInstanceName}'} # type: ignore + + def _delete_initial( + self, + resource_group_name, # type: str + sql_server_instance_name, # type: str + **kwargs # type: Any + ): + # type: (...) 
-> None + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + accept = "application/json" + + # Construct URL + url = self._delete_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'sqlServerInstanceName': self._serialize.url("sql_server_instance_name", sql_server_instance_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/sqlServerInstances/{sqlServerInstanceName}'} # type: ignore + + def begin_delete( + self, + resource_group_name, # type: str + sql_server_instance_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Deletes a SQL Server Instance resource. + + :param resource_group_name: The name of the Azure resource group. + :type resource_group_name: str + :param sql_server_instance_name: The name of SQL Server Instance. + :type sql_server_instance_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + sql_server_instance_name=sql_server_instance_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'sqlServerInstanceName': self._serialize.url("sql_server_instance_name", sql_server_instance_name, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/sqlServerInstances/{sqlServerInstanceName}'} # type: ignore + + def update( + self, + resource_group_name, # type: str + sql_server_instance_name, # type: str + parameters, # type: "_models.SqlServerInstanceUpdate" + **kwargs # type: Any + ): + # type: (...) -> "_models.SqlServerInstance" + """Updates a SQL Server Instance resource. + + :param resource_group_name: The name of the Azure resource group. + :type resource_group_name: str + :param sql_server_instance_name: Name of sqlServerInstance. + :type sql_server_instance_name: str + :param parameters: The SQL Server Instance. 
+ :type parameters: ~azure_arc_data_management_client.models.SqlServerInstanceUpdate + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SqlServerInstance, or the result of cls(response) + :rtype: ~azure_arc_data_management_client.models.SqlServerInstance + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.SqlServerInstance"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'sqlServerInstanceName': self._serialize.url("sql_server_instance_name", sql_server_instance_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'SqlServerInstanceUpdate') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('SqlServerInstance', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AzureArcData/sqlServerInstances/{sqlServerInstanceName}'} # type: ignore diff --git a/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/py.typed b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/py.typed new file mode 100644 index 000000000000..e5aff4f83af8 --- /dev/null +++ b/sdk/azurearcdata/azure-mgmt-azurearcdata/azure/mgmt/azurearcdata/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. 
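
The generated operation groups above all follow the standard `azure-core` calling patterns: `list*` methods return an `ItemPaged` iterator, and `begin_*` methods return an `LROPoller` that can be blocked on, tuned with `polling_interval`, or resumed from a continuation token. The sketch below is illustrative only: the `AzureArcDataManagementClient` class name and the `sql_server_instances` attribute are assumptions based on the generated namespace, and `DefaultAzureCredential` assumes `azure-identity` is installed.

```py
# Illustrative usage sketch -- the client class and attribute names below are
# assumptions inferred from the generated code, not taken from this diff.
from azure.identity import DefaultAzureCredential
from azure.mgmt.azurearcdata import AzureArcDataManagementClient

credential = DefaultAzureCredential()
client = AzureArcDataManagementClient(credential, subscription_id="<subscription-id>")

# list* operations return an ItemPaged iterator; pages are fetched lazily as you iterate.
for instance in client.sql_server_instances.list_by_resource_group("<resource-group>"):
    print(instance.name)

# begin_* operations return an LROPoller. result() blocks until the operation finishes;
# polling_interval overrides the default wait between polls when no Retry-After is sent.
poller = client.sql_server_instances.begin_delete(
    "<resource-group>", "<sql-server-instance>", polling_interval=10
)

# A poller can be suspended and resumed later from its continuation token.
token = poller.continuation_token()
resumed = client.sql_server_instances.begin_delete(
    "<resource-group>", "<sql-server-instance>", continuation_token=token
)
resumed.result()
```
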
\ No newline at end of file diff --git a/sdk/azurearcdata/azure-mgmt-azurearcdata/sdk_packaging.toml b/sdk/azurearcdata/azure-mgmt-azurearcdata/sdk_packaging.toml new file mode 100644 index 000000000000..e7ca90eac1d5 --- /dev/null +++ b/sdk/azurearcdata/azure-mgmt-azurearcdata/sdk_packaging.toml @@ -0,0 +1,9 @@ +[packaging] +package_name = "azure-mgmt-azurearcdata" +package_nspkg = "azure-mgmt-nspkg" +package_pprint_name = "Azurearcdata Management" +package_doc_id = "" +is_stable = false +is_arm = true +need_msrestazure = false +need_azuremgmtcore = true diff --git a/sdk/azurearcdata/azure-mgmt-azurearcdata/setup.cfg b/sdk/azurearcdata/azure-mgmt-azurearcdata/setup.cfg new file mode 100644 index 000000000000..3c6e79cf31da --- /dev/null +++ b/sdk/azurearcdata/azure-mgmt-azurearcdata/setup.cfg @@ -0,0 +1,2 @@ +[bdist_wheel] +universal=1 diff --git a/sdk/azurearcdata/azure-mgmt-azurearcdata/setup.py b/sdk/azurearcdata/azure-mgmt-azurearcdata/setup.py new file mode 100644 index 000000000000..e8339e57c249 --- /dev/null +++ b/sdk/azurearcdata/azure-mgmt-azurearcdata/setup.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python + +#------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +#-------------------------------------------------------------------------- + +import re +import os.path +from io import open +from setuptools import find_packages, setup + +# Change the PACKAGE_NAME only to change folder and different name +PACKAGE_NAME = "azure-mgmt-azurearcdata" +PACKAGE_PPRINT_NAME = "Azurearcdata Management" + +# a-b-c => a/b/c +package_folder_path = PACKAGE_NAME.replace('-', '/') +# a-b-c => a.b.c +namespace_name = PACKAGE_NAME.replace('-', '.') + +# azure v0.x is not compatible with this package +# azure v0.x used to have a __version__ attribute (newer versions don't) +try: + import azure + try: + ver = azure.__version__ + raise Exception( + 'This package is incompatible with azure=={}. '.format(ver) + + 'Uninstall it with "pip uninstall azure".' 
+ ) + except AttributeError: + pass +except ImportError: + pass + +# Version extraction inspired from 'requests' +with open(os.path.join(package_folder_path, 'version.py') + if os.path.exists(os.path.join(package_folder_path, 'version.py')) + else os.path.join(package_folder_path, '_version.py'), 'r') as fd: + version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', + fd.read(), re.MULTILINE).group(1) + +if not version: + raise RuntimeError('Cannot find version information') + +with open('README.md', encoding='utf-8') as f: + readme = f.read() +with open('CHANGELOG.md', encoding='utf-8') as f: + changelog = f.read() + +setup( + name=PACKAGE_NAME, + version=version, + description='Microsoft Azure {} Client Library for Python'.format(PACKAGE_PPRINT_NAME), + long_description=readme + '\n\n' + changelog, + long_description_content_type='text/markdown', + license='MIT License', + author='Microsoft Corporation', + author_email='azpysdkhelp@microsoft.com', + url='https://github.com/Azure/azure-sdk-for-python', + classifiers=[ + 'Development Status :: 4 - Beta', + 'Programming Language :: Python', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'License :: OSI Approved :: MIT License', + ], + zip_safe=False, + packages=find_packages(exclude=[ + 'tests', + # Exclude packages that will be covered by PEP420 or nspkg + 'azure', + 'azure.mgmt', + ]), + install_requires=[ + 'msrest>=0.6.21', + 'azure-common~=1.1', + 'azure-mgmt-core>=1.2.0,<2.0.0', + ], + extras_require={ + ":python_version<'3.0'": ['azure-mgmt-nspkg'], + } +) diff --git a/sdk/azurearcdata/ci.yml b/sdk/azurearcdata/ci.yml new file mode 100644 index 000000000000..48168494a8e0 --- /dev/null +++ b/sdk/azurearcdata/ci.yml @@ -0,0 +1,33 @@ +# DO NOT EDIT THIS FILE +# This file is generated automatically and any changes will be lost. + +trigger: + branches: + include: + - main + - hotfix/* + - release/* + - restapi* + paths: + include: + - sdk/azurearcdata/ + +pr: + branches: + include: + - main + - feature/* + - hotfix/* + - release/* + - restapi* + paths: + include: + - sdk/azurearcdata/ + +extends: + template: ../../eng/pipelines/templates/stages/archetype-sdk-client.yml + parameters: + ServiceDirectory: azurearcdata + Artifacts: + - name: azure-mgmt-azurearcdata + safeName: azuremgmtazurearcdata diff --git a/sdk/communication/azure-communication-chat/CHANGELOG.md b/sdk/communication/azure-communication-chat/CHANGELOG.md index 5d6e923f9b5a..be7af070f13d 100644 --- a/sdk/communication/azure-communication-chat/CHANGELOG.md +++ b/sdk/communication/azure-communication-chat/CHANGELOG.md @@ -1,6 +1,6 @@ # Release History -## 1.1.0 (Unreleased) +## 1.1.0 (2021-09-15) - Updated `azure-communication-chat` version. ## 1.1.0b1 (2021-08-16) diff --git a/sdk/communication/azure-communication-chat/azure/communication/chat/_version.py b/sdk/communication/azure-communication-chat/azure/communication/chat/_version.py index a83b9274f4f8..c274994f7969 100644 --- a/sdk/communication/azure-communication-chat/azure/communication/chat/_version.py +++ b/sdk/communication/azure-communication-chat/azure/communication/chat/_version.py @@ -4,6 +4,6 @@ # license information. 
# -------------------------------------------------------------------------- -VERSION = "1.1.0b1" +VERSION = "1.1.0" SDK_MONIKER = "communication-chat/{}".format(VERSION) # type: str diff --git a/sdk/digitaltwins/azure-digitaltwins-core/samples/notebooks/01_Patrons.ipynb b/sdk/digitaltwins/azure-digitaltwins-core/samples/notebooks/01_Patrons.ipynb new file mode 100644 index 000000000000..5fa433175299 --- /dev/null +++ b/sdk/digitaltwins/azure-digitaltwins-core/samples/notebooks/01_Patrons.ipynb @@ -0,0 +1,605 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# The Patron\n", + "## An example of the Digital Twin\n", + "\n", + "This example just includes:\n", + "* Connecting\n", + "* Uploading models\n", + "* Creating twins\n", + "* Querying twins\n", + "\n", + "It's not a lot, but it will all tie together when we start running scenarios. \n", + "\n", + "[This is the SDK repo on Github](https://github.com/Azure/azure-sdk-for-python/tree/4559e19e2f3146a49f1eba1706bb798071f4a1f5/sdk/digitaltwins/azure-digitaltwins-core)\n", + "\n", + "[Here is the doc on the query language](https://docs.microsoft.com/en-us/azure/digital-twins/concepts-query-language)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Connecting\n", + "\n", + "**note**: you will need to replace {`home-test-twin.api.wcus.digitaltwins.azure.net`} with the address of your ADT. " + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from azure.identity import AzureCliCredential\n", + "from azure.digitaltwins.core import DigitalTwinsClient\n", + "\n", + "# using yaml instead of \n", + "import yaml\n", + "import uuid\n", + "\n", + "# using altair instead of matplotlib for vizuals\n", + "import numpy as np\n", + "import pandas as pd\n", + "\n", + "# you will get this from the ADT resource at portal.azure.com\n", + "your_digital_twin_url = \"home-test-twin.api.wcus.digitaltwins.azure.net\"\n", + "\n", + "azure_cli = AzureCliCredential()\n", + "service_client = DigitalTwinsClient(\n", + " your_digital_twin_url, azure_cli)\n", + "service_client" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "service_client" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Loading the model from a local Json file, I'm defining a model for a `Patron`. That's like a `class` that will be used to make several instances of a customer." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Uploading models" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "patron_model_id = \"dtmi:mymodels:patron;1\"" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "# # Delete the model that you don't want. \n", + "# service_client.delete_model(patron_model_id)\n", + "\n", + "# # Create it if you just deleted it.\n", + "# patron_model_json = yaml.safe_load(open(\"models/patron.json\"))\n", + "# service_client.create_models([patron_model_json])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Note that the json must be in a list. 
This is so that you can deploy several models in a single go." + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'display_name': {'en': 'Patron'},\n", + " 'description': {'en': 'As an example, contains all of the properties possible in the DTDL.'},\n", + " 'id': 'dtmi:billmanh:patron;1',\n", + " 'upload_time': '2020-11-19T02:14:27.21094Z',\n", + " 'decommissioned': False}" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Get the Patron model\n", + "get_model = service_client.get_model(patron_model_id)\n", + "get_model.as_dict()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In my case I use `customer` as an _instance_ of a model (a _twin_). The only thing to add is the information specific to the twin I want to upload. " + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Creating twins" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [], + "source": [ + "digital_twin_id = 'customer-' + str(uuid.uuid4())\n", + "\n", + "customer_json = {\n", + " \"$metadata\": {\n", + " \"$model\": patron_model_id\n", + " },\n", + " \"satisfaction\": 10,\n", + " \"totalWaitTime\": 10\n", + "}\n", + "\n", + "created_twin = service_client.upsert_digital_twin(digital_twin_id, customer_json)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You can see that the process with the API is simple:\n", + "* Define a model\n", + "* Build a twin from that model\n", + "* Give that model some attributes.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "dict" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "type(created_twin)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'$dtId': 'customer-3cbd5e60-957d-44ff-944f-9adb42a20a52',\n", + " '$etag': 'W/\"ea33ae65-f47c-4116-8e3d-1c6fa61777d6\"',\n", + " 'satisfaction': 10,\n", + " 'totalWaitTime': 10,\n", + " '$metadata': {'$model': 'dtmi:billmanh:patron;1',\n", + " 'satisfaction': {'lastUpdateTime': '2020-11-19T15:48:13.1057699Z'},\n", + " 'totalWaitTime': {'lastUpdateTime': '2020-11-19T15:48:13.1057699Z'}}}" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "created_twin" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "So let's create a bunch of people. Let's pretend we have a single room full of a bunch of people that all have similar attributes:\n", + "* `satisfaction` varies from person to person.\n", + "* `totalWaitTime` is the same at the beginning. \n", + "\n", + "the item for the `$metadata` is the model that it will be an instance of. 
The rest of the items in the dict are the `contents` of the model as you defined it.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[('customer-5c454e2f-f70b-4352-b75a-958f1a49beba',\n", + " {'$metadata': {'$model': 'dtmi:billmanh:patron;1'},\n", + " 'satisfaction': 7,\n", + " 'totalWaitTime': 0}),\n", + " ('customer-26196fee-5ffd-457a-86b7-192a998f3cf2',\n", + " {'$metadata': {'$model': 'dtmi:billmanh:patron;1'},\n", + " 'satisfaction': 9,\n", + " 'totalWaitTime': 0}),\n", + " ('customer-e6f49d8a-711b-41c3-9db8-c7ece3dbc32c',\n", + " {'$metadata': {'$model': 'dtmi:billmanh:patron;1'},\n", + " 'satisfaction': 7,\n", + " 'totalWaitTime': 0}),\n", + " ('customer-c87adbfa-1c6e-4ea9-9f03-83e3877ef5fc',\n", + " {'$metadata': {'$model': 'dtmi:billmanh:patron;1'},\n", + " 'satisfaction': 8,\n", + " 'totalWaitTime': 0}),\n", + " ('customer-21e17d28-76c3-4c04-8df9-396703692a68',\n", + " {'$metadata': {'$model': 'dtmi:billmanh:patron;1'},\n", + " 'satisfaction': 8,\n", + " 'totalWaitTime': 0})]" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def create_new_customer():\n", + " digital_twin_id = 'customer-' + str(uuid.uuid4())\n", + " customer_json = {\n", + " \"$metadata\": {\n", + " \"$model\": patron_model_id\n", + " },\n", + " \"satisfaction\": np.random.randint(5,10),\n", + " \"totalWaitTime\": 0\n", + " }\n", + " return digital_twin_id,customer_json\n", + " \n", + "customer_twin_examples = [create_new_customer() for i in range(40)]\n", + "customer_twin_examples[:5]" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "
\n", + "" + ], + "text/plain": [ + "alt.Chart(...)" + ] + }, + "execution_count": 22, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "customer_df = pd.DataFrame([i[1] for i in customer_twin_examples])\n", + "\n", + "customer_df" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Of course, those are just local customer data points. You could imagine that this is information collected at the edge. \n", + "\n", + "Now let's upload those people to see them in the cloud. Try to imagine that each customer in the store had an app on their phone that sent this information to the graph. Each `customer` represents a data feed from IoT telemetry. Generally, each device would upload a separate feed of IoT, but we are simulating using a single device (your computer) to upload this data.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [], + "source": [ + "for i in customer_twin_examples:\n", + " service_client.upsert_digital_twin(i[0], i[1])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now let's query our customers to see how they are feeling. " + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Querying twins\n", + "This is pretty easy. If you've ever done SQL you'll get this right away.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": {}, + "outputs": [], + "source": [ + "query_expression = 'SELECT * FROM digitaltwins'\n", + "query_result = service_client.query_twins(query_expression)" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": {}, + "outputs": [], + "source": [ + "for i in query_result:\n", + " pass" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'$dtId': 'customer-2e551c55-265a-46e0-a84b-4965bc734e21',\n", + " '$etag': 'W/\"8f1be988-70af-4488-a7c9-5403a4b6ea1d\"',\n", + " 'satisfaction': 9,\n", + " 'totalWaitTime': 0,\n", + " '$metadata': {'$model': 'dtmi:billmanh:patron;1',\n", + " 'satisfaction': {'lastUpdateTime': '2020-11-19T16:13:44.9882083Z'},\n", + " 'totalWaitTime': {'lastUpdateTime': '2020-11-19T16:13:44.9882083Z'}}}" + ] + }, + "execution_count": 29, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "i" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's say I want to get all of the customers that have > 7 satisfaction _Happy customers_" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [], + "source": [ + "query_result = service_client.query_twins(\n", + "\"\"\"\n", + "SELECT *\n", + "FROM DigitalTwins T \n", + "WHERE T.satisfaction > 7\n", + "\"\"\"\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "customer-cc04f3b6-39b0-4cef-bfff-a7d668cce446 10\n", + "customer-3cbd5e60-957d-44ff-944f-9adb42a20a52 10\n", + "customer-26196fee-5ffd-457a-86b7-192a998f3cf2 9\n", + "customer-c87adbfa-1c6e-4ea9-9f03-83e3877ef5fc 8\n", + "customer-21e17d28-76c3-4c04-8df9-396703692a68 8\n", + "customer-8375cece-abf8-400f-be66-459f7f40fe6a 8\n", + "customer-d0483de8-0f9f-41d4-ac0c-84d608f7467a 8\n", + "customer-8ae03d4b-ee2c-4ffd-9aa8-8edbfbf39728 9\n", + "customer-fc568319-d5a9-492a-83ce-fed774257003 8\n", + "customer-d9ca8715-1a6d-43ea-83d2-671fc972ebd7 9\n", + 
"customer-acd7a91f-1d28-46b3-894d-6f3953fbd85e 8\n", + "customer-a867aacb-68a7-4872-a3bb-330b0b34ef7c 9\n", + "customer-373017d3-d7e2-4407-8c5b-5fc44e20286f 9\n", + "customer-418cbc74-b101-4f50-8cf0-6075a2a8053c 8\n", + "customer-04cc7f9d-5503-4d64-8dbc-373a47a8710a 9\n", + "customer-e5d090d6-ae68-4c31-890e-16ce96c1e462 9\n", + "customer-12a558cf-b15d-41ee-a98b-d07c4b7411ee 8\n", + "customer-1c51c2a0-45bc-442c-b4df-1d193e11f628 8\n", + "customer-ca674938-4d13-463f-8858-e0b9ac5deebc 9\n", + "customer-2139a9c9-ff79-4a63-9576-c34744f0521d 9\n", + "customer-2e551c55-265a-46e0-a84b-4965bc734e21 9\n", + "customer-107c1168-d86b-40e1-a57d-4c500148e172 10\n", + "customer-73299891-e2f2-4d29-98f1-9d2ee0e9f12b 9\n", + "customer-d1de64aa-cfc2-4385-9644-667506f0f474 9\n", + "customer-d4e84d9f-ee9d-45e8-ba9f-d9d90885f578 9\n", + "customer-b7a9fbba-8e06-4b23-ac39-35acfbacfe7c 8\n", + "customer-83f65979-cd94-41da-b248-c6a1a76e2f13 9\n", + "customer-84e9f81b-db4c-40c7-8284-230a357937a7 8\n", + "customer-06c5c57e-aec0-47c4-ac78-b986e589da15 8\n", + "customer-26a50817-189d-48cf-8ecc-efa9828f092a 9\n", + "customer-45e9aa03-733d-4a99-b9d5-94f1c6b04214 9\n", + "customer-0234cb48-1fa2-43e0-b69d-36a6ff269666 9\n", + "customer-048f85a8-173e-4305-92b8-ead2a748b07f 8\n", + "customer-25e19268-3433-4f09-afe3-94f466313368 10\n" + ] + } + ], + "source": [ + "for i in query_result:\n", + " print (i['$dtId'],i['satisfaction'])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Of course, you can just pull everything, but this might be problematic as you build a larger model. " + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "metadata": {}, + "outputs": [], + "source": [ + "query_result = service_client.query_twins(\n", + "\"\"\"\n", + "SELECT COUNT() \n", + "FROM DigitalTwins \n", + "\"\"\"\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'COUNT': 88}\n" + ] + } + ], + "source": [ + "for i in query_result:\n", + " print(i)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python (azure_test)", + "language": "python", + "name": "azure_test" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.9" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} \ No newline at end of file diff --git a/sdk/digitaltwins/azure-digitaltwins-core/samples/notebooks/02_Purchasing_Tickets.ipynb b/sdk/digitaltwins/azure-digitaltwins-core/samples/notebooks/02_Purchasing_Tickets.ipynb new file mode 100644 index 000000000000..f5f2199c7661 --- /dev/null +++ b/sdk/digitaltwins/azure-digitaltwins-core/samples/notebooks/02_Purchasing_Tickets.ipynb @@ -0,0 +1,1695 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Purchasing tickets\n", + "## An example use case of the Digital Twin\n", + "\n", + "This example just includes:\n", + "* Connecting\n", + "* Building relationships between models\n", + "* Updating values\n", + "* Modifying the relationships and updating twins\n", + "* Querying twins by relationship\n", + "\n", + "In the previous scenario we made a bunch of customers. 
In this scenario we built a bunch of individual users \n", + "[This is the SDK repo on Github](https://github.com/Azure/azure-sdk-for-python/tree/4559e19e2f3146a49f1eba1706bb798071f4a1f5/sdk/digitaltwins/azure-digitaltwins-core)\n", + "\n", + "[Here is the doc on the query language](https://docs.microsoft.com/en-us/azure/digital-twins/concepts-query-language)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 1, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from azure.identity import AzureCliCredential\n", + "from azure.digitaltwins.core import DigitalTwinsClient\n", + "\n", + "# using yaml instead of \n", + "import yaml\n", + "import uuid\n", + "\n", + "# using altair instead of matplotlib for vizuals\n", + "import numpy as np\n", + "import pandas as pd\n", + "\n", + "# you will get this from the ADT resource at portal.azure.com\n", + "your_digital_twin_url = \"home-test-twin.api.wcus.digitaltwins.azure.net\"\n", + "\n", + "azure_cli = AzureCliCredential()\n", + "service_client = DigitalTwinsClient(\n", + " your_digital_twin_url, azure_cli)\n", + "service_client" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "So from the previous notebook we uploaded some models. " + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'display_name': {'en': 'Patron'},\n", + " 'description': {'en': 'As an example, contains all of the properties possible in the DTDL.'},\n", + " 'id': 'dtmi:billmanh:patron;1',\n", + " 'upload_time': '2020-11-30T00:26:34.514069Z',\n", + " 'decommissioned': False}" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "patron = service_client.get_model(\"dtmi:mymodels:patron;1\")\n", + "patron.as_dict()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "And we have several `customers` in our ecosystem that are made from the `patron` model." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "query_expression = \"SELECT * FROM digitaltwins t where IS_OF_MODEL('dtmi:billmanh:patron;1')\"\n", + "query_result = service_client.query_twins(query_expression)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**Note** the query object loves to drop values. To keep from making multiple queries, save the data somewhere. " + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "values = []\n", + "for i in query_result:\n", + " values.append(i)" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "df_customers = pd.DataFrame([[i['$dtId'],i['satisfaction']] for i in values],\n", + " columns=['id','satisfaction'])" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
idsatisfaction
0customer-cc04f3b6-39b0-4cef-bfff-a7d668cce44610
1customer-3cbd5e60-957d-44ff-944f-9adb42a20a5210
2customer-5c454e2f-f70b-4352-b75a-958f1a49beba7
3customer-26196fee-5ffd-457a-86b7-192a998f3cf29
4customer-e6f49d8a-711b-41c3-9db8-c7ece3dbc32c7
.........
79customer-45e9aa03-733d-4a99-b9d5-94f1c6b042149
80customer-0234cb48-1fa2-43e0-b69d-36a6ff2696669
81customer-75b2f757-faee-4a85-bc93-e6e9ff7cd8916
82customer-048f85a8-173e-4305-92b8-ead2a748b07f8
83customer-25e19268-3433-4f09-afe3-94f46631336810
\n", + "

84 rows × 2 columns

\n", + "
" + ], + "text/plain": [ + " id satisfaction\n", + "0 customer-cc04f3b6-39b0-4cef-bfff-a7d668cce446 10\n", + "1 customer-3cbd5e60-957d-44ff-944f-9adb42a20a52 10\n", + "2 customer-5c454e2f-f70b-4352-b75a-958f1a49beba 7\n", + "3 customer-26196fee-5ffd-457a-86b7-192a998f3cf2 9\n", + "4 customer-e6f49d8a-711b-41c3-9db8-c7ece3dbc32c 7\n", + ".. ... ...\n", + "79 customer-45e9aa03-733d-4a99-b9d5-94f1c6b04214 9\n", + "80 customer-0234cb48-1fa2-43e0-b69d-36a6ff269666 9\n", + "81 customer-75b2f757-faee-4a85-bc93-e6e9ff7cd891 6\n", + "82 customer-048f85a8-173e-4305-92b8-ead2a748b07f 8\n", + "83 customer-25e19268-3433-4f09-afe3-94f466313368 10\n", + "\n", + "[84 rows x 2 columns]" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "df_customers" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's add another model for `tickets`. In our very simple model, we will assume that `customers` will buy `tickets` to our events." + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [], + "source": [ + "# service_client.delete_model(ticket_model_id)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "# ticket_model_json = yaml.safe_load(open(\"models/ticket.json\"))\n", + "# service_client.create_models([ticket_model_json])" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'display_name': {'en': 'ticket'},\n", + " 'description': {'en': 'an abstract ticket'},\n", + " 'id': 'dtmi:billmanh:ticket;1',\n", + " 'upload_time': '2020-11-26T20:12:34.578813Z',\n", + " 'decommissioned': False}" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "ticket_model_id = \"dtmi:mymodels:ticket;1\"\n", + "get_model = service_client.get_model(ticket_model_id)\n", + "get_model.as_dict()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Note that the dict returned from the service doesn't contain all of the values that you created, but that's ok. " + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "def generate_twin(name,model_id):\n", + " digital_twin_id = f'{name}-{str(uuid.uuid4())}'\n", + " dt_json = {\n", + " \"$metadata\": {\n", + " \"$model\": model_id\n", + " }\n", + " }\n", + " return digital_twin_id,dt_json\n", + "\n", + "def updsert_twin():\n", + " created_twin = service_client.upsert_digital_twin(digital_twin_id, dt_json)\n", + " return created_twin" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [], + "source": [ + "ticket_id, ticket_json = generate_twin(\"ticket\",ticket_model_id)" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'$metadata': {'$model': 'dtmi:billmanh:ticket;1'}}" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "ticket_json" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "So when I create the `tickets` I’m going to create them as unsold (`available`) and for a range of events. I'm just going to introduce some tickets into the system for some shows coming up. These tickets just exist locally at this point, but we will 'go live' when we push them into the ecosystem." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
$metadataevent_titlestateticket_locationuid
0{'$model': 'dtmi:billmanh:ticket;1'}Nirvanaopen0ticket-b10211a2-1a7a-4f6f-9fc3-d23daefe8cbe
1{'$model': 'dtmi:billmanh:ticket;1'}Nirvanaopen1ticket-d1d2ad08-add8-4f5f-97cf-0b6516914cc9
2{'$model': 'dtmi:billmanh:ticket;1'}Nirvanaopen2ticket-260e3349-adc5-41af-9ae5-4f3690d813c7
3{'$model': 'dtmi:billmanh:ticket;1'}Nirvanaopen3ticket-9f2bc5f4-b7d6-4f86-a1f8-c570cb263717
4{'$model': 'dtmi:billmanh:ticket;1'}Nirvanaopen4ticket-cf70a216-0183-4458-8e13-94f432f7b8c0
5{'$model': 'dtmi:billmanh:ticket;1'}Smashing Pumpkinsopen0ticket-57a45e6f-f42a-41b0-a0f5-68e5e0fd67ba
6{'$model': 'dtmi:billmanh:ticket;1'}Smashing Pumpkinsopen1ticket-7f414b3f-fde6-4cbc-ad58-86c7ccfb1109
7{'$model': 'dtmi:billmanh:ticket;1'}Smashing Pumpkinsopen2ticket-2d69b397-3ec3-4588-9eaf-5eb931f1f8c4
8{'$model': 'dtmi:billmanh:ticket;1'}Smashing Pumpkinsopen3ticket-e9fc6ad3-d7ad-4ef1-8a58-0759a0f27a00
9{'$model': 'dtmi:billmanh:ticket;1'}Smashing Pumpkinsopen4ticket-dc48bc69-b1f5-4850-b5af-037c4ea72961
10{'$model': 'dtmi:billmanh:ticket;1'}Foo Fightersopen0ticket-b7a550e8-0fa8-46ff-8b4c-1a290a5d706c
11{'$model': 'dtmi:billmanh:ticket;1'}Foo Fightersopen1ticket-92826a40-b024-4b91-a4d2-7e20dcfddce3
12{'$model': 'dtmi:billmanh:ticket;1'}Foo Fightersopen2ticket-8c483b5c-37d4-4027-8d67-d920bf8ca063
13{'$model': 'dtmi:billmanh:ticket;1'}Foo Fightersopen3ticket-09028447-8e23-4e4f-a100-098a4dc8e919
14{'$model': 'dtmi:billmanh:ticket;1'}Foo Fightersopen4ticket-ccc89397-0d52-42a3-8fe1-39c97158c87c
\n", + "
" + ], + "text/plain": [ + " $metadata event_title state \\\n", + "0 {'$model': 'dtmi:billmanh:ticket;1'} Nirvana open \n", + "1 {'$model': 'dtmi:billmanh:ticket;1'} Nirvana open \n", + "2 {'$model': 'dtmi:billmanh:ticket;1'} Nirvana open \n", + "3 {'$model': 'dtmi:billmanh:ticket;1'} Nirvana open \n", + "4 {'$model': 'dtmi:billmanh:ticket;1'} Nirvana open \n", + "5 {'$model': 'dtmi:billmanh:ticket;1'} Smashing Pumpkins open \n", + "6 {'$model': 'dtmi:billmanh:ticket;1'} Smashing Pumpkins open \n", + "7 {'$model': 'dtmi:billmanh:ticket;1'} Smashing Pumpkins open \n", + "8 {'$model': 'dtmi:billmanh:ticket;1'} Smashing Pumpkins open \n", + "9 {'$model': 'dtmi:billmanh:ticket;1'} Smashing Pumpkins open \n", + "10 {'$model': 'dtmi:billmanh:ticket;1'} Foo Fighters open \n", + "11 {'$model': 'dtmi:billmanh:ticket;1'} Foo Fighters open \n", + "12 {'$model': 'dtmi:billmanh:ticket;1'} Foo Fighters open \n", + "13 {'$model': 'dtmi:billmanh:ticket;1'} Foo Fighters open \n", + "14 {'$model': 'dtmi:billmanh:ticket;1'} Foo Fighters open \n", + "\n", + " ticket_location uid \n", + "0 0 ticket-b10211a2-1a7a-4f6f-9fc3-d23daefe8cbe \n", + "1 1 ticket-d1d2ad08-add8-4f5f-97cf-0b6516914cc9 \n", + "2 2 ticket-260e3349-adc5-41af-9ae5-4f3690d813c7 \n", + "3 3 ticket-9f2bc5f4-b7d6-4f86-a1f8-c570cb263717 \n", + "4 4 ticket-cf70a216-0183-4458-8e13-94f432f7b8c0 \n", + "5 0 ticket-57a45e6f-f42a-41b0-a0f5-68e5e0fd67ba \n", + "6 1 ticket-7f414b3f-fde6-4cbc-ad58-86c7ccfb1109 \n", + "7 2 ticket-2d69b397-3ec3-4588-9eaf-5eb931f1f8c4 \n", + "8 3 ticket-e9fc6ad3-d7ad-4ef1-8a58-0759a0f27a00 \n", + "9 4 ticket-dc48bc69-b1f5-4850-b5af-037c4ea72961 \n", + "10 0 ticket-b7a550e8-0fa8-46ff-8b4c-1a290a5d706c \n", + "11 1 ticket-92826a40-b024-4b91-a4d2-7e20dcfddce3 \n", + "12 2 ticket-8c483b5c-37d4-4027-8d67-d920bf8ca063 \n", + "13 3 ticket-09028447-8e23-4e4f-a100-098a4dc8e919 \n", + "14 4 ticket-ccc89397-0d52-42a3-8fe1-39c97158c87c " + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def generate_tickets(title,n_tickets):\n", + " tickets = []\n", + " for i in range(n_tickets):\n", + " ticket_id, ticket_json = generate_twin(\"ticket\",ticket_model_id)\n", + " ticket_json['event_title'] = title\n", + " ticket_json['state'] = 'open'\n", + " ticket_json['ticket_location'] = i\n", + " ticket_json['uid'] = f'ticket-{str(uuid.uuid4())}'\n", + " tickets.append(ticket_json)\n", + " return tickets\n", + "\n", + "tickets_df = pd.concat([pd.DataFrame(generate_tickets('Nirvana',5)),\n", + " pd.DataFrame(generate_tickets('Smashing Pumpkins',5)),\n", + " pd.DataFrame(generate_tickets('Foo Fighters',5))]).reset_index(drop=True)\n", + "\n", + "tickets_df" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Ok now that we know what tickets we want to sell, let's push them to the digital twin ecosystem. This is exactly the same as what we did with `Customers` in step one. 
" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [], + "source": [ + "ticket_twins = []\n", + "for i in tickets_df.index:\n", + " ticket_data = tickets_df.loc[i]\n", + " ticket_twin_id = ticket_data['uid']\n", + " ticket_json = ticket_data.drop('uid').to_dict()\n", + " created_twin = service_client.upsert_digital_twin(ticket_twin_id, ticket_json)\n", + " ticket_twins.append(created_twin)" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[{'$dtId': 'ticket-b10211a2-1a7a-4f6f-9fc3-d23daefe8cbe',\n", + " '$etag': 'W/\"172ddd8b-a58c-41da-9081-19b0ff611ffa\"',\n", + " 'event_title': 'Nirvana',\n", + " 'state': 'open',\n", + " 'ticket_location': '0',\n", + " '$metadata': {'$model': 'dtmi:billmanh:ticket;1',\n", + " 'event_title': {'lastUpdateTime': '2020-12-07T01:30:50.2153563Z'},\n", + " 'state': {'lastUpdateTime': '2020-12-07T01:30:50.2153563Z'},\n", + " 'ticket_location': {'lastUpdateTime': '2020-12-07T01:30:50.2153563Z'}}},\n", + " {'$dtId': 'ticket-d1d2ad08-add8-4f5f-97cf-0b6516914cc9',\n", + " '$etag': 'W/\"f775c7be-176a-401a-b427-bd7797752283\"',\n", + " 'event_title': 'Nirvana',\n", + " 'state': 'open',\n", + " 'ticket_location': '1',\n", + " '$metadata': {'$model': 'dtmi:billmanh:ticket;1',\n", + " 'event_title': {'lastUpdateTime': '2020-12-07T01:30:50.8441094Z'},\n", + " 'state': {'lastUpdateTime': '2020-12-07T01:30:50.8441094Z'},\n", + " 'ticket_location': {'lastUpdateTime': '2020-12-07T01:30:50.8441094Z'}}},\n", + " {'$dtId': 'ticket-260e3349-adc5-41af-9ae5-4f3690d813c7',\n", + " '$etag': 'W/\"8670c70a-b849-4d0c-abbc-5bcf6b77ec21\"',\n", + " 'event_title': 'Nirvana',\n", + " 'state': 'open',\n", + " 'ticket_location': '2',\n", + " '$metadata': {'$model': 'dtmi:billmanh:ticket;1',\n", + " 'event_title': {'lastUpdateTime': '2020-12-07T01:30:51.0152850Z'},\n", + " 'state': {'lastUpdateTime': '2020-12-07T01:30:51.0152850Z'},\n", + " 'ticket_location': {'lastUpdateTime': '2020-12-07T01:30:51.0152850Z'}}}]" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "ticket_twins[:3]" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [], + "source": [ + "# # Delete twins if you want\n", + "# for i in tickets_df.index:\n", + "# ticket_data = tickets_df.loc[i]\n", + "# ticket_twin_id = ticket_data['uid']\n", + "# service_client.delete_digital_twin(ticket_twin_id)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "So now I have `customers` and I have `tickets`, but there is no relationship between them. Let's pretend that I have a website that sells tickets. When a `customer` buys a `ticket` on the website, a relationship between the two is established AND the status of the ticket changes to sold. \n", + "\n", + "* website (or mobile app) will tell the user which tickets are still available, and have features that allow them to search by row, or by concert.\n", + "* the website (or mobile app) gets the latitude and longitude of the user when they buy the tickets. \n", + "* once purchased, the status of that ticket changes to `sold`.\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# The user experience (mobile app)\n", + "\n", + "Here is where the user will buy tickets. \n", + "\n", + "This cell is a customer experience. The `customer` enters the website and looks up the available tickets for the show they want. 
This data comes from the website. " + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [], + "source": [ + "# selection from a dropdown menu or something\n", + "selection_show = 'Nirvana'\n", + "# just grabbing the first customer. \n", + "user = df_customers.loc[0]['id']\n", + "user_lat = np.random.randint(0,100)\n", + "user_long = np.random.randint(0,100)\n", + "\n", + "query_expression = f\"\"\"\n", + "SELECT * FROM digitaltwins where IS_OF_MODEL('{ticket_model_id}') \n", + "and state = 'open'\n", + "and event_title = '{selection_show}'\n", + "\"\"\"\n", + "query_result = service_client.query_twins(query_expression)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Note that the client dumps records after the query. Try running the cell below a few times. If you need to use the values again and again you need to put them into memory somewhere. " + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
$dtIdevent_titlestate
0ticket-b10211a2-1a7a-4f6f-9fc3-d23daefe8cbeNirvanaopen
1ticket-d1d2ad08-add8-4f5f-97cf-0b6516914cc9Nirvanaopen
2ticket-cf70a216-0183-4458-8e13-94f432f7b8c0Nirvanaopen
3ticket-260e3349-adc5-41af-9ae5-4f3690d813c7Nirvanaopen
4ticket-9f2bc5f4-b7d6-4f86-a1f8-c570cb263717Nirvanaopen
\n", + "
" + ], + "text/plain": [ + " $dtId event_title state\n", + "0 ticket-b10211a2-1a7a-4f6f-9fc3-d23daefe8cbe Nirvana open\n", + "1 ticket-d1d2ad08-add8-4f5f-97cf-0b6516914cc9 Nirvana open\n", + "2 ticket-cf70a216-0183-4458-8e13-94f432f7b8c0 Nirvana open\n", + "3 ticket-260e3349-adc5-41af-9ae5-4f3690d813c7 Nirvana open\n", + "4 ticket-9f2bc5f4-b7d6-4f86-a1f8-c570cb263717 Nirvana open" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "available_tickets_df = pd.DataFrame([[i['$dtId'],i['event_title'],i['state']] for i in query_result],\n", + " columns = ['$dtId','event_title','state'])\n", + "available_tickets_df" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now the user chooses to buy a ticket. The ticket state changes to closed." + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'$dtId': 'ticket-b10211a2-1a7a-4f6f-9fc3-d23daefe8cbe',\n", + " '$etag': 'W/\"172ddd8b-a58c-41da-9081-19b0ff611ffa\"',\n", + " 'event_title': 'Nirvana',\n", + " 'state': 'open',\n", + " 'ticket_location': '0',\n", + " '$metadata': {'$model': 'dtmi:billmanh:ticket;1',\n", + " 'event_title': {'lastUpdateTime': '2020-12-07T01:30:50.2153563Z'},\n", + " 'state': {'lastUpdateTime': '2020-12-07T01:30:50.2153563Z'},\n", + " 'ticket_location': {'lastUpdateTime': '2020-12-07T01:30:50.2153563Z'}}}" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "customer_selection = available_tickets_df.loc[0]['$dtId']\n", + "\n", + "service_client.get_digital_twin(customer_selection)" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [], + "source": [ + "customer_selection = available_tickets_df.loc[0]['$dtId']\n", + "\n", + "patch = [\n", + " {\n", + " \"op\": \"replace\",\n", + " \"path\": \"\",\n", + " \"value\": \"sold\"\n", + " }\n", + "]\n", + "service_client.update_component(customer_selection,\"state\", patch)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**NOTE** that the path is `\"\"` when the item is at the root of the state." + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
$dtIdevent_titlestate
0ticket-600f2149-1ed8-4cb4-b0b8-bb6d03f575cdNirvanasold
1ticket-b8860b9b-cc54-4591-abb3-2abc3adc0800Nirvanasold
2ticket-2256b8c9-1dd8-4ef8-a00a-b191dc122532Nirvanasold
3ticket-99023c6e-5d97-4e43-ab08-3d893233c8b0Nirvanasold
4ticket-132839a8-a4a3-4c75-ae96-9ff4085a07d4Nirvanasold
5ticket-b10211a2-1a7a-4f6f-9fc3-d23daefe8cbeNirvanaopen
6ticket-d1d2ad08-add8-4f5f-97cf-0b6516914cc9Nirvanaopen
7ticket-cf70a216-0183-4458-8e13-94f432f7b8c0Nirvanaopen
8ticket-260e3349-adc5-41af-9ae5-4f3690d813c7Nirvanaopen
9ticket-9f2bc5f4-b7d6-4f86-a1f8-c570cb263717Nirvanaopen
\n", + "
" + ], + "text/plain": [ + " $dtId event_title state\n", + "0 ticket-600f2149-1ed8-4cb4-b0b8-bb6d03f575cd Nirvana sold\n", + "1 ticket-b8860b9b-cc54-4591-abb3-2abc3adc0800 Nirvana sold\n", + "2 ticket-2256b8c9-1dd8-4ef8-a00a-b191dc122532 Nirvana sold\n", + "3 ticket-99023c6e-5d97-4e43-ab08-3d893233c8b0 Nirvana sold\n", + "4 ticket-132839a8-a4a3-4c75-ae96-9ff4085a07d4 Nirvana sold\n", + "5 ticket-b10211a2-1a7a-4f6f-9fc3-d23daefe8cbe Nirvana open\n", + "6 ticket-d1d2ad08-add8-4f5f-97cf-0b6516914cc9 Nirvana open\n", + "7 ticket-cf70a216-0183-4458-8e13-94f432f7b8c0 Nirvana open\n", + "8 ticket-260e3349-adc5-41af-9ae5-4f3690d813c7 Nirvana open\n", + "9 ticket-9f2bc5f4-b7d6-4f86-a1f8-c570cb263717 Nirvana open" + ] + }, + "execution_count": 21, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "query_expression = f\"\"\"\n", + "SELECT * FROM digitaltwins where IS_OF_MODEL('{ticket_model_id}') \n", + "and event_title = '{selection_show}'\n", + "\"\"\"\n", + "query_result = service_client.query_twins(query_expression)\n", + "available_tickets_df = pd.DataFrame([[i['$dtId'],i['event_title'],i['state']] for i in query_result],\n", + " columns = ['$dtId','event_title','state'])\n", + "available_tickets_df" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Relationships: \n", + "* the target is the leaf (or the customer)\n", + "* the source is the branch (or the ticket)\n", + "\n", + "you can add extra stuff to the relationship. " + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "ticket-b10211a2-1a7a-4f6f-9fc3-d23daefe8cbe\n", + "customer-cc04f3b6-39b0-4cef-bfff-a7d668cce446\n" + ] + } + ], + "source": [ + "print(customer_selection)\n", + "print(user)" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'$relationshipId': 'ticket-b10211a2-1a7a-4f6f-9fc3-d23daefe8cbeownedBycustomer-cc04f3b6-39b0-4cef-bfff-a7d668cce446',\n", + " '$etag': 'W/\"63140a7e-b8e9-4638-909e-9adf67a05d0d\"',\n", + " '$sourceId': 'ticket-b10211a2-1a7a-4f6f-9fc3-d23daefe8cbe',\n", + " '$relationshipName': 'ownedBy',\n", + " '$targetId': 'customer-cc04f3b6-39b0-4cef-bfff-a7d668cce446',\n", + " 'bought_online': True}" + ] + }, + "execution_count": 23, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "tickethoder_relationship = {\n", + " \"$relationshipId\": f\"{customer_selection}ownedBy{user}\",\n", + " \"$sourceId\": customer_selection,\n", + " \"$relationshipName\": \"ownedBy\",\n", + " \"$targetId\": user,\n", + " \"bought_online\": True\n", + " }\n", + "\n", + "service_client.upsert_relationship(\n", + " tickethoder_relationship[\"$sourceId\"],\n", + " tickethoder_relationship[\"$relationshipId\"],\n", + " tickethoder_relationship\n", + " )" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "14" + ] + }, + "execution_count": 24, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "[i for i in service_client.list_relationships(customer_selection)]\n", + "i" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Another customer will buy several tickets. " + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
$dtIdevent_titlestate
0ticket-52e25a0a-06b1-428f-b6a2-479cafc45c16Smashing Pumpkinsopen
1ticket-58bf00bf-d5b8-4d06-9002-f8bf1693cc56Foo Fightersopen
2ticket-67f3cbfb-4b35-4e10-bf6d-3fd216093c3cFoo Fightersopen
3ticket-3f82286d-7e0b-4df5-a491-d9e28f7c94f9Foo Fightersopen
4ticket-d1d2ad08-add8-4f5f-97cf-0b6516914cc9Nirvanaopen
5ticket-cf70a216-0183-4458-8e13-94f432f7b8c0Nirvanaopen
6ticket-260e3349-adc5-41af-9ae5-4f3690d813c7Nirvanaopen
7ticket-9f2bc5f4-b7d6-4f86-a1f8-c570cb263717Nirvanaopen
8ticket-b7a550e8-0fa8-46ff-8b4c-1a290a5d706cFoo Fightersopen
9ticket-57a45e6f-f42a-41b0-a0f5-68e5e0fd67baSmashing Pumpkinsopen
10ticket-7f414b3f-fde6-4cbc-ad58-86c7ccfb1109Smashing Pumpkinsopen
11ticket-2d69b397-3ec3-4588-9eaf-5eb931f1f8c4Smashing Pumpkinsopen
12ticket-e9fc6ad3-d7ad-4ef1-8a58-0759a0f27a00Smashing Pumpkinsopen
13ticket-dc48bc69-b1f5-4850-b5af-037c4ea72961Smashing Pumpkinsopen
14ticket-92826a40-b024-4b91-a4d2-7e20dcfddce3Foo Fightersopen
15ticket-8c483b5c-37d4-4027-8d67-d920bf8ca063Foo Fightersopen
16ticket-09028447-8e23-4e4f-a100-098a4dc8e919Foo Fightersopen
17ticket-ccc89397-0d52-42a3-8fe1-39c97158c87cFoo Fightersopen
\n", + "
" + ], + "text/plain": [ + " $dtId event_title state\n", + "0 ticket-52e25a0a-06b1-428f-b6a2-479cafc45c16 Smashing Pumpkins open\n", + "1 ticket-58bf00bf-d5b8-4d06-9002-f8bf1693cc56 Foo Fighters open\n", + "2 ticket-67f3cbfb-4b35-4e10-bf6d-3fd216093c3c Foo Fighters open\n", + "3 ticket-3f82286d-7e0b-4df5-a491-d9e28f7c94f9 Foo Fighters open\n", + "4 ticket-d1d2ad08-add8-4f5f-97cf-0b6516914cc9 Nirvana open\n", + "5 ticket-cf70a216-0183-4458-8e13-94f432f7b8c0 Nirvana open\n", + "6 ticket-260e3349-adc5-41af-9ae5-4f3690d813c7 Nirvana open\n", + "7 ticket-9f2bc5f4-b7d6-4f86-a1f8-c570cb263717 Nirvana open\n", + "8 ticket-b7a550e8-0fa8-46ff-8b4c-1a290a5d706c Foo Fighters open\n", + "9 ticket-57a45e6f-f42a-41b0-a0f5-68e5e0fd67ba Smashing Pumpkins open\n", + "10 ticket-7f414b3f-fde6-4cbc-ad58-86c7ccfb1109 Smashing Pumpkins open\n", + "11 ticket-2d69b397-3ec3-4588-9eaf-5eb931f1f8c4 Smashing Pumpkins open\n", + "12 ticket-e9fc6ad3-d7ad-4ef1-8a58-0759a0f27a00 Smashing Pumpkins open\n", + "13 ticket-dc48bc69-b1f5-4850-b5af-037c4ea72961 Smashing Pumpkins open\n", + "14 ticket-92826a40-b024-4b91-a4d2-7e20dcfddce3 Foo Fighters open\n", + "15 ticket-8c483b5c-37d4-4027-8d67-d920bf8ca063 Foo Fighters open\n", + "16 ticket-09028447-8e23-4e4f-a100-098a4dc8e919 Foo Fighters open\n", + "17 ticket-ccc89397-0d52-42a3-8fe1-39c97158c87c Foo Fighters open" + ] + }, + "execution_count": 25, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "query_expression = f\"\"\"\n", + "SELECT * FROM digitaltwins t where IS_OF_MODEL('{ticket_model_id}') \n", + "and t.state = 'open'\n", + "\"\"\"\n", + "query_result = service_client.query_twins(query_expression)\n", + "available_tickets_df = pd.DataFrame([[i['$dtId'],i['event_title'],i['state']] for i in query_result],\n", + " columns = ['$dtId','event_title','state'])\n", + "available_tickets_df" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'customer-25e19268-3433-4f09-afe3-94f466313368'" + ] + }, + "execution_count": 26, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "other_user = df_customers.loc[len(df_customers)-1]['id']\n", + "other_user" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['ticket-52e25a0a-06b1-428f-b6a2-479cafc45c16',\n", + " 'ticket-58bf00bf-d5b8-4d06-9002-f8bf1693cc56',\n", + " 'ticket-67f3cbfb-4b35-4e10-bf6d-3fd216093c3c',\n", + " 'ticket-3f82286d-7e0b-4df5-a491-d9e28f7c94f9',\n", + " 'ticket-d1d2ad08-add8-4f5f-97cf-0b6516914cc9',\n", + " 'ticket-cf70a216-0183-4458-8e13-94f432f7b8c0']" + ] + }, + "execution_count": 27, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "tickets_bought = available_tickets_df.loc[:5,'$dtId'].tolist()\n", + "tickets_bought" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['ticket-52e25a0a-06b1-428f-b6a2-479cafc45c16',\n", + " 'ticket-58bf00bf-d5b8-4d06-9002-f8bf1693cc56',\n", + " 'ticket-67f3cbfb-4b35-4e10-bf6d-3fd216093c3c',\n", + " 'ticket-3f82286d-7e0b-4df5-a491-d9e28f7c94f9',\n", + " 'ticket-d1d2ad08-add8-4f5f-97cf-0b6516914cc9',\n", + " 'ticket-cf70a216-0183-4458-8e13-94f432f7b8c0']" + ] + }, + "execution_count": 28, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "tickets_bought" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": {}, + 
"outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "ticket-52e25a0a-06b1-428f-b6a2-479cafc45c16\n", + "ticket-58bf00bf-d5b8-4d06-9002-f8bf1693cc56\n", + "ticket-67f3cbfb-4b35-4e10-bf6d-3fd216093c3c\n", + "ticket-3f82286d-7e0b-4df5-a491-d9e28f7c94f9\n", + "ticket-d1d2ad08-add8-4f5f-97cf-0b6516914cc9\n", + "ticket-cf70a216-0183-4458-8e13-94f432f7b8c0\n" + ] + } + ], + "source": [ + "for t in tickets_bought:\n", + " print(t)\n", + " tickethoder_relationship = {\n", + " \"$relationshipId\": f\"{customer_selection}ownedBy{other_user}\",\n", + " \"$sourceId\": t,\n", + " \"$relationshipName\": \"ownedBy\",\n", + " \"$targetId\": other_user,\n", + " \"bought_online\": False\n", + " }\n", + "\n", + " service_client.upsert_relationship(\n", + " tickethoder_relationship[\"$sourceId\"],\n", + " tickethoder_relationship[\"$relationshipId\"],\n", + " tickethoder_relationship\n", + " )\n", + " customer_selection = available_tickets_df.loc[0]['$dtId']\n", + "\n", + " patch = [\n", + " {\n", + " \"op\": \"replace\",\n", + " \"path\": \"\",\n", + " \"value\": \"sold\"\n", + " }\n", + " ]\n", + " service_client.update_component(t,\"state\", patch)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now let's lets look at the tickets. " + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
$dtIdevent_titlestate
0ticket-600f2149-1ed8-4cb4-b0b8-bb6d03f575cdNirvanasold
1ticket-b8860b9b-cc54-4591-abb3-2abc3adc0800Nirvanasold
2ticket-2256b8c9-1dd8-4ef8-a00a-b191dc122532Nirvanasold
3ticket-18663c82-67a2-4791-b1c4-05db261af867Smashing Pumpkinssold
4ticket-99023c6e-5d97-4e43-ab08-3d893233c8b0Nirvanasold
5ticket-132839a8-a4a3-4c75-ae96-9ff4085a07d4Nirvanasold
6ticket-79f3f382-bd65-4788-8ad1-0db507b8a3abSmashing Pumpkinssold
7ticket-9f7c0906-9729-4568-a559-89d63b634d09Smashing Pumpkinssold
8ticket-633f4537-50a1-4735-9e70-99bf19be51e4Smashing Pumpkinssold
9ticket-52e25a0a-06b1-428f-b6a2-479cafc45c16Smashing Pumpkinssold
10ticket-ca767da7-0152-4861-9be6-fbe39b5dd9d8Foo Fighterssold
11ticket-0cd531c6-d81c-4c4c-84c9-9592a8f43e70Foo Fighterssold
12ticket-58bf00bf-d5b8-4d06-9002-f8bf1693cc56Foo Fighterssold
13ticket-67f3cbfb-4b35-4e10-bf6d-3fd216093c3cFoo Fighterssold
14ticket-3f82286d-7e0b-4df5-a491-d9e28f7c94f9Foo Fighterssold
15ticket-b10211a2-1a7a-4f6f-9fc3-d23daefe8cbeNirvanasold
16ticket-d1d2ad08-add8-4f5f-97cf-0b6516914cc9Nirvanasold
17ticket-cf70a216-0183-4458-8e13-94f432f7b8c0Nirvanasold
18ticket-260e3349-adc5-41af-9ae5-4f3690d813c7Nirvanaopen
19ticket-9f2bc5f4-b7d6-4f86-a1f8-c570cb263717Nirvanaopen
20ticket-b7a550e8-0fa8-46ff-8b4c-1a290a5d706cFoo Fightersopen
21ticket-57a45e6f-f42a-41b0-a0f5-68e5e0fd67baSmashing Pumpkinsopen
22ticket-7f414b3f-fde6-4cbc-ad58-86c7ccfb1109Smashing Pumpkinsopen
23ticket-2d69b397-3ec3-4588-9eaf-5eb931f1f8c4Smashing Pumpkinsopen
24ticket-e9fc6ad3-d7ad-4ef1-8a58-0759a0f27a00Smashing Pumpkinsopen
25ticket-dc48bc69-b1f5-4850-b5af-037c4ea72961Smashing Pumpkinsopen
26ticket-92826a40-b024-4b91-a4d2-7e20dcfddce3Foo Fightersopen
27ticket-8c483b5c-37d4-4027-8d67-d920bf8ca063Foo Fightersopen
28ticket-09028447-8e23-4e4f-a100-098a4dc8e919Foo Fightersopen
29ticket-ccc89397-0d52-42a3-8fe1-39c97158c87cFoo Fightersopen
\n", + "
" + ], + "text/plain": [ + " $dtId event_title state\n", + "0 ticket-600f2149-1ed8-4cb4-b0b8-bb6d03f575cd Nirvana sold\n", + "1 ticket-b8860b9b-cc54-4591-abb3-2abc3adc0800 Nirvana sold\n", + "2 ticket-2256b8c9-1dd8-4ef8-a00a-b191dc122532 Nirvana sold\n", + "3 ticket-18663c82-67a2-4791-b1c4-05db261af867 Smashing Pumpkins sold\n", + "4 ticket-99023c6e-5d97-4e43-ab08-3d893233c8b0 Nirvana sold\n", + "5 ticket-132839a8-a4a3-4c75-ae96-9ff4085a07d4 Nirvana sold\n", + "6 ticket-79f3f382-bd65-4788-8ad1-0db507b8a3ab Smashing Pumpkins sold\n", + "7 ticket-9f7c0906-9729-4568-a559-89d63b634d09 Smashing Pumpkins sold\n", + "8 ticket-633f4537-50a1-4735-9e70-99bf19be51e4 Smashing Pumpkins sold\n", + "9 ticket-52e25a0a-06b1-428f-b6a2-479cafc45c16 Smashing Pumpkins sold\n", + "10 ticket-ca767da7-0152-4861-9be6-fbe39b5dd9d8 Foo Fighters sold\n", + "11 ticket-0cd531c6-d81c-4c4c-84c9-9592a8f43e70 Foo Fighters sold\n", + "12 ticket-58bf00bf-d5b8-4d06-9002-f8bf1693cc56 Foo Fighters sold\n", + "13 ticket-67f3cbfb-4b35-4e10-bf6d-3fd216093c3c Foo Fighters sold\n", + "14 ticket-3f82286d-7e0b-4df5-a491-d9e28f7c94f9 Foo Fighters sold\n", + "15 ticket-b10211a2-1a7a-4f6f-9fc3-d23daefe8cbe Nirvana sold\n", + "16 ticket-d1d2ad08-add8-4f5f-97cf-0b6516914cc9 Nirvana sold\n", + "17 ticket-cf70a216-0183-4458-8e13-94f432f7b8c0 Nirvana sold\n", + "18 ticket-260e3349-adc5-41af-9ae5-4f3690d813c7 Nirvana open\n", + "19 ticket-9f2bc5f4-b7d6-4f86-a1f8-c570cb263717 Nirvana open\n", + "20 ticket-b7a550e8-0fa8-46ff-8b4c-1a290a5d706c Foo Fighters open\n", + "21 ticket-57a45e6f-f42a-41b0-a0f5-68e5e0fd67ba Smashing Pumpkins open\n", + "22 ticket-7f414b3f-fde6-4cbc-ad58-86c7ccfb1109 Smashing Pumpkins open\n", + "23 ticket-2d69b397-3ec3-4588-9eaf-5eb931f1f8c4 Smashing Pumpkins open\n", + "24 ticket-e9fc6ad3-d7ad-4ef1-8a58-0759a0f27a00 Smashing Pumpkins open\n", + "25 ticket-dc48bc69-b1f5-4850-b5af-037c4ea72961 Smashing Pumpkins open\n", + "26 ticket-92826a40-b024-4b91-a4d2-7e20dcfddce3 Foo Fighters open\n", + "27 ticket-8c483b5c-37d4-4027-8d67-d920bf8ca063 Foo Fighters open\n", + "28 ticket-09028447-8e23-4e4f-a100-098a4dc8e919 Foo Fighters open\n", + "29 ticket-ccc89397-0d52-42a3-8fe1-39c97158c87c Foo Fighters open" + ] + }, + "execution_count": 30, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "query_expression = f\"\"\"\n", + "SELECT * FROM digitaltwins t where IS_OF_MODEL('{ticket_model_id}') \n", + "\"\"\"\n", + "query_result = service_client.query_twins(query_expression)\n", + "available_tickets_df = pd.DataFrame([[i['$dtId'],i['event_title'],i['state']] for i in query_result],\n", + " columns = ['$dtId','event_title','state'])\n", + "available_tickets_df" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You can see that our web app can easily filter on tickets that are open and give a person specific rows. We will get into more complicated analysis in the next steps." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python (azure_test)", + "language": "python", + "name": "azure_test" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.9" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} \ No newline at end of file diff --git a/sdk/digitaltwins/azure-digitaltwins-core/samples/notebooks/03_Adding_a_bunch of_other_components.ipynb b/sdk/digitaltwins/azure-digitaltwins-core/samples/notebooks/03_Adding_a_bunch of_other_components.ipynb new file mode 100644 index 000000000000..578e852889ed --- /dev/null +++ b/sdk/digitaltwins/azure-digitaltwins-core/samples/notebooks/03_Adding_a_bunch of_other_components.ipynb @@ -0,0 +1,649 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Moving through a physical experience\n", + "## Some examples of IoT application that ping a digital twin\n", + "\n", + "This doc does not really teach anything new, but it will give us some exercise with the previous examples. We will need a more complicated model in future steps. \n", + "* Adding more connections.\n", + "* Making connections using ids. \n", + "\n", + "\n", + "[This is the SDK repo on Github](https://github.com/Azure/azure-sdk-for-python/tree/4559e19e2f3146a49f1eba1706bb798071f4a1f5/sdk/digitaltwins/azure-digitaltwins-core)\n", + "\n", + "[Here is the doc on the query language](https://docs.microsoft.com/en-us/azure/digital-twins/concepts-query-language)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 30, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from azure.identity import AzureCliCredential\n", + "from azure.digitaltwins.core import DigitalTwinsClient\n", + "\n", + "# using yaml instead of \n", + "import yaml\n", + "import uuid\n", + "\n", + "# using altair instead of matplotlib for vizuals\n", + "import numpy as np\n", + "import pandas as pd\n", + "\n", + "# you will get this from the ADT resource at portal.azure.com\n", + "your_digital_twin_url = \"home-test-twin.api.wcus.digitaltwins.azure.net\"\n", + "\n", + "azure_cli = AzureCliCredential()\n", + "service_client = DigitalTwinsClient(\n", + " your_digital_twin_url, azure_cli)\n", + "service_client" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "query_expression = \"SELECT * FROM digitaltwins t where IS_OF_MODEL('dtmi:billmanh:patron;1')\"\n", + "query_result = service_client.query_twins(query_expression)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**Note** the query object loves to drop values. To keep from making multiple queries, save the data somewhere. 
" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [], + "source": [ + "values = []\n", + "for i in query_result:\n", + " values.append(i)" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "df_customers = pd.DataFrame([[i['$dtId'],i['satisfaction']] for i in values],\n", + " columns=['id','satisfaction'])" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
idsatisfaction
0customer-cc04f3b6-39b0-4cef-bfff-a7d668cce44610
1customer-3cbd5e60-957d-44ff-944f-9adb42a20a5210
2customer-5c454e2f-f70b-4352-b75a-958f1a49beba7
3customer-26196fee-5ffd-457a-86b7-192a998f3cf29
4customer-e6f49d8a-711b-41c3-9db8-c7ece3dbc32c7
.........
79customer-45e9aa03-733d-4a99-b9d5-94f1c6b042149
80customer-0234cb48-1fa2-43e0-b69d-36a6ff2696669
81customer-75b2f757-faee-4a85-bc93-e6e9ff7cd8916
82customer-048f85a8-173e-4305-92b8-ead2a748b07f8
83customer-25e19268-3433-4f09-afe3-94f46631336810
\n", + "

84 rows × 2 columns

\n", + "
" + ], + "text/plain": [ + " id satisfaction\n", + "0 customer-cc04f3b6-39b0-4cef-bfff-a7d668cce446 10\n", + "1 customer-3cbd5e60-957d-44ff-944f-9adb42a20a52 10\n", + "2 customer-5c454e2f-f70b-4352-b75a-958f1a49beba 7\n", + "3 customer-26196fee-5ffd-457a-86b7-192a998f3cf2 9\n", + "4 customer-e6f49d8a-711b-41c3-9db8-c7ece3dbc32c 7\n", + ".. ... ...\n", + "79 customer-45e9aa03-733d-4a99-b9d5-94f1c6b04214 9\n", + "80 customer-0234cb48-1fa2-43e0-b69d-36a6ff269666 9\n", + "81 customer-75b2f757-faee-4a85-bc93-e6e9ff7cd891 6\n", + "82 customer-048f85a8-173e-4305-92b8-ead2a748b07f 8\n", + "83 customer-25e19268-3433-4f09-afe3-94f466313368 10\n", + "\n", + "[84 rows x 2 columns]" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "df_customers" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "and a `df` of the tickets" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
$dtIdevent_titlestate
0ticket-633f4537-50a1-4735-9e70-99bf19be51e4Smashing Pumpkinsopen
1ticket-52e25a0a-06b1-428f-b6a2-479cafc45c16Smashing Pumpkinsopen
2ticket-0cd531c6-d81c-4c4c-84c9-9592a8f43e70Foo Fightersopen
3ticket-58bf00bf-d5b8-4d06-9002-f8bf1693cc56Foo Fightersopen
4ticket-67f3cbfb-4b35-4e10-bf6d-3fd216093c3cFoo Fightersopen
5ticket-3f82286d-7e0b-4df5-a491-d9e28f7c94f9Foo Fightersopen
\n", + "
" + ], + "text/plain": [ + " $dtId event_title state\n", + "0 ticket-633f4537-50a1-4735-9e70-99bf19be51e4 Smashing Pumpkins open\n", + "1 ticket-52e25a0a-06b1-428f-b6a2-479cafc45c16 Smashing Pumpkins open\n", + "2 ticket-0cd531c6-d81c-4c4c-84c9-9592a8f43e70 Foo Fighters open\n", + "3 ticket-58bf00bf-d5b8-4d06-9002-f8bf1693cc56 Foo Fighters open\n", + "4 ticket-67f3cbfb-4b35-4e10-bf6d-3fd216093c3c Foo Fighters open\n", + "5 ticket-3f82286d-7e0b-4df5-a491-d9e28f7c94f9 Foo Fighters open" + ] + }, + "execution_count": 31, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "query_expression = f\"\"\"\n", + "SELECT * FROM digitaltwins t where IS_OF_MODEL('dtmi:mymodels:ticket;1') and t.state='open'\n", + "\"\"\"\n", + "query_result = service_client.query_twins(query_expression)\n", + "available_tickets_df = pd.DataFrame([[i['$dtId'],i['event_title'],i['state']] for i in query_result],\n", + " columns = ['$dtId','event_title','state'])\n", + "available_tickets_df" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's sell a couple more of those tickets. " + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['ticket-633f4537-50a1-4735-9e70-99bf19be51e4',\n", + " 'ticket-0cd531c6-d81c-4c4c-84c9-9592a8f43e70']" + ] + }, + "execution_count": 33, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "tickets_sold = available_tickets_df.drop_duplicates(subset='event_title')['$dtId'].tolist()\n", + "\n", + "tickets_sold" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['customer-c87adbfa-1c6e-4ea9-9f03-83e3877ef5fc',\n", + " 'customer-21e17d28-76c3-4c04-8df9-396703692a68']" + ] + }, + "execution_count": 34, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "customers_sold = df_customers['id'].tolist()[5:7]\n", + "\n", + "customers_sold" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The purpose is to create a simulation. So I'm just hacking something out real quick to make our model look a little fuller." + ] + }, + { + "cell_type": "code", + "execution_count": 35, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "ticket-633f4537-50a1-4735-9e70-99bf19be51e4\n", + "ticket-0cd531c6-d81c-4c4c-84c9-9592a8f43e70\n" + ] + } + ], + "source": [ + "for c,t in enumerate(tickets_sold):\n", + " print(t)\n", + " tickethoder_relationship = {\n", + " \"$relationshipId\": f\"{t}ownedBy{customers_sold[c]}\",\n", + " \"$sourceId\": t,\n", + " \"$relationshipName\": \"ownedBy\",\n", + " \"$targetId\": customers_sold[c],\n", + " \"bought_online\": False\n", + " }\n", + "\n", + " service_client.upsert_relationship(\n", + " tickethoder_relationship[\"$sourceId\"],\n", + " tickethoder_relationship[\"$relationshipId\"],\n", + " tickethoder_relationship\n", + " )\n", + "\n", + " patch = [\n", + " {\n", + " \"op\": \"replace\",\n", + " \"path\": \"\",\n", + " \"value\": \"sold\"\n", + " }\n", + " ]\n", + " service_client.update_component(t,\"state\", patch)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You should be able to load the storage explorer at this point and look at the relationships. " + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "I'm going to add some areas. 
\n", + "* People go into areas when the concert begins.\n", + "* areas have capacity\n", + "\n", + "Also adding lines:\n", + "* Lines lead to other places, but one line can lead to many places (like the entrance).\n", + "* Lines have a capacity\n", + "\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 87, + "metadata": {}, + "outputs": [], + "source": [ + "# # # Delete the model that you don't want. \n", + "# # service_client.delete_model(patron_model_id)\n", + "\n", + "# # Create it if you just deleted it.\n", + "# # area_model_json = yaml.safe_load(open(\"models/area.json\"))\n", + "# line_model_json = yaml.safe_load(open(\"models/Patron.json\"))\n", + "# service_client.create_models([line_model_json])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Ok now that we know what tickets we want to sell, let's push them to the digital twin ecosystem. This is exactly the same as what we did with `Customers` in step one. " + ] + }, + { + "cell_type": "code", + "execution_count": 58, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "area-0\n", + "{'$dtId': 'area-0', '$etag': 'W/\"7789a5c1-8ab0-473d-9c0e-6c12abe8f20f\"', 'capacity': 3, 'status': 'open', '$metadata': {'$model': 'dtmi:billmanh:area;1', 'capacity': {'lastUpdateTime': '2020-11-29T23:47:16.3097488Z'}, 'status': {'lastUpdateTime': '2020-11-29T23:47:16.3097488Z'}}}\n", + "area-1\n", + "{'$dtId': 'area-1', '$etag': 'W/\"78d4f941-5c4f-4e55-9a24-e1061f35a0f9\"', 'capacity': 3, 'status': 'open', '$metadata': {'$model': 'dtmi:billmanh:area;1', 'capacity': {'lastUpdateTime': '2020-11-29T23:47:16.3994079Z'}, 'status': {'lastUpdateTime': '2020-11-29T23:47:16.3994079Z'}}}\n", + "area-2\n", + "{'$dtId': 'area-2', '$etag': 'W/\"8d9d633f-51e1-4814-bf7f-71ffa5b647e6\"', 'capacity': 3, 'status': 'open', '$metadata': {'$model': 'dtmi:billmanh:area;1', 'capacity': {'lastUpdateTime': '2020-11-29T23:47:16.4841446Z'}, 'status': {'lastUpdateTime': '2020-11-29T23:47:16.4841446Z'}}}\n", + "area-3\n", + "{'$dtId': 'area-3', '$etag': 'W/\"990a5327-b8e5-4051-9ce2-9a078ec3c0aa\"', 'capacity': 3, 'status': 'open', '$metadata': {'$model': 'dtmi:billmanh:area;1', 'capacity': {'lastUpdateTime': '2020-11-29T23:47:16.5562464Z'}, 'status': {'lastUpdateTime': '2020-11-29T23:47:16.5562464Z'}}}\n" + ] + } + ], + "source": [ + "for r in range(4):\n", + " digital_twin_id = f'area-{r}'\n", + " print(digital_twin_id)\n", + " dt_json = {\n", + " \"$metadata\": {\n", + " \"$model\": \"dtmi:mymodels:area;1\"\n", + " },\n", + " \"capacity\": 3,\n", + " \"status\": \"open\"\n", + " }\n", + " created_twin = service_client.upsert_digital_twin(digital_twin_id, dt_json)\n", + " print(created_twin)" + ] + }, + { + "cell_type": "code", + "execution_count": 71, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "line-0\n", + "{'$dtId': 'line-0', '$etag': 'W/\"efd0563b-244c-41b1-8a02-7d2f5bcb11cb\"', '$metadata': {'$model': 'dtmi:billmanh:line;1'}}\n", + "line-1\n", + "{'$dtId': 'line-1', '$etag': 'W/\"e96e3bcb-3829-4fd0-a042-6d5f866fbd0a\"', '$metadata': {'$model': 'dtmi:billmanh:line;1'}}\n", + "line-2\n", + "{'$dtId': 'line-2', '$etag': 'W/\"c2f75074-3491-47b4-a703-6ade5e9993ad\"', '$metadata': {'$model': 'dtmi:billmanh:line;1'}}\n", + "line-3\n", + "{'$dtId': 'line-3', '$etag': 'W/\"8d749fc0-c15f-448d-b246-8f2d9212a29d\"', '$metadata': {'$model': 'dtmi:billmanh:line;1'}}\n" + ] + } + ], + "source": [ + "for r in range(4):\n", + " digital_twin_id 
= f'line-{r}'\n", + " print(digital_twin_id)\n", + " dt_json = {\n", + " \"$metadata\": {\n", + " \"$model\": \"dtmi:mymodels:line;1\"\n", + " }\n", + " }\n", + " created_twin = service_client.upsert_digital_twin(digital_twin_id, dt_json)\n", + " print(created_twin)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now establishing a realtionship between the lines and the areas. If a person leaves one area they have to get into the line of the next area in order to get there. " + ] + }, + { + "cell_type": "code", + "execution_count": 77, + "metadata": {}, + "outputs": [], + "source": [ + "def line_to_room(line,area,walking):\n", + " tickethoder_relationship = {\n", + " \"$relationshipId\": f\"{line}leadsTo{area}\",\n", + " \"$sourceId\": line,\n", + " \"$relationshipName\": \"leadsTo\",\n", + " \"$targetId\": area,\n", + " \"walk_distance\": walking\n", + " }\n", + "\n", + " service_client.upsert_relationship(\n", + " tickethoder_relationship[\"$sourceId\"],\n", + " tickethoder_relationship[\"$relationshipId\"],\n", + " tickethoder_relationship\n", + " )\n", + " \n", + "line_to_room(\"line-0\",\"area-0\",5)\n", + "line_to_room(\"line-1\",\"area-1\",5)\n", + "line_to_room(\"line-2\",\"area-2\",5)\n", + "line_to_room(\"line-3\",\"area-3\",5)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now I'm going to add some people into different parts of the experience. This will change from user to user, but you can use this as an example of how this would work on your twin. Use the queries above to get the specific ids of your twins." + ] + }, + { + "cell_type": "code", + "execution_count": 85, + "metadata": {}, + "outputs": [], + "source": [ + "def cust_to_area(cust,area):\n", + " tickethoder_relationship = {\n", + " \"$relationshipId\": f\"{cust}locatedIn{area}\",\n", + " \"$sourceId\": cust,\n", + " \"$relationshipName\": \"locatedIn\",\n", + " \"$targetId\": area,\n", + " }\n", + "\n", + " service_client.upsert_relationship(\n", + " tickethoder_relationship[\"$sourceId\"],\n", + " tickethoder_relationship[\"$relationshipId\"],\n", + " tickethoder_relationship\n", + " )\n", + " \n", + "cust_to_area(\"customer-e6f49d8a-711b-41c3-9db8-c7ece3dbc32c\",\"line-1\")\n", + "cust_to_area(\"customer-21e17d28-76c3-4c04-8df9-396703692a68\",\"line-1\")\n", + "# cust_to_area(\"customer-25e19268-3433-4f09-afe3-94f466313368\",\"line-0\")\n", + "# cust_to_area(\"customer-c87adbfa-1c6e-4ea9-9f03-83e3877ef5fc\",\"line-2\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Sell a couple more tickets" + ] + }, + { + "cell_type": "code", + "execution_count": 83, + "metadata": {}, + "outputs": [], + "source": [ + "def ticket_to_cust(ticket,cust):\n", + " tickethoder_relationship = {\n", + " \"$relationshipId\": f\"{ticket}ownedBy{cust}\",\n", + " \"$sourceId\": ticket,\n", + " \"$relationshipName\": \"ownedBy\",\n", + " \"$targetId\": cust,\n", + " }\n", + "\n", + " service_client.upsert_relationship(\n", + " tickethoder_relationship[\"$sourceId\"],\n", + " tickethoder_relationship[\"$relationshipId\"],\n", + " tickethoder_relationship\n", + " )\n", + " \n", + "ticket_to_cust(\"ticket-58bf00bf-d5b8-4d06-9002-f8bf1693cc56\",\"customer-418cbc74-b101-4f50-8cf0-6075a2a8053c\")\n", + "ticket_to_cust(\"ticket-67f3cbfb-4b35-4e10-bf6d-3fd216093c3c\",\"customer-9c9b5c36-69f6-4f48-9362-4aaac4cb1be4\")\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python (azure_test)", + "language": "python", + "name": "azure_test" + }, + 
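Once patrons start moving, the `locatedIn` relationship created by `cust_to_area` has to be replaced rather than duplicated. Below is a hedged sketch of a small helper for that, assuming the relationship id follows the `f"{cust}locatedIn{place}"` convention used above; `delete_relationship` is the SDK call for removing an existing relationship from its source twin.

```py
def move_customer(cust, old_place, new_place):
    # Drop the old locatedIn relationship, then create one for the new location.
    service_client.delete_relationship(cust, f"{cust}locatedIn{old_place}")
    cust_to_area(cust, new_place)

# e.g. when the doors open and line-1 files into area-1:
# move_customer("customer-e6f49d8a-711b-41c3-9db8-c7ece3dbc32c", "line-1", "area-1")
```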
"language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.9" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} \ No newline at end of file diff --git a/sdk/digitaltwins/azure-digitaltwins-core/samples/notebooks/04_Lots_on_Queries.ipynb b/sdk/digitaltwins/azure-digitaltwins-core/samples/notebooks/04_Lots_on_Queries.ipynb new file mode 100644 index 000000000000..0a6969916e4b --- /dev/null +++ b/sdk/digitaltwins/azure-digitaltwins-core/samples/notebooks/04_Lots_on_Queries.ipynb @@ -0,0 +1,1197 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Complex queries\n", + "## An example use of the Digital Twin\n", + "\n", + "In this notebook we are going to dive deep into queries:\n", + "* Examining the customer experience through the lens of different aspects of the experience. \n", + "\n", + "In our previous steps we made:\n", + "* Patrons, that have a `customer satisfaction`, a relationship with tickets, and locations.\n", + "* Tickets that are `owned by customers`\n", + "* Lines that lead to areas\n", + "* Areas where people are located. \n", + "\n", + "We will be doing a bunch of different queries on this theme. \n", + "\n", + "\n", + "[This is the SDK repo on Github](https://github.com/Azure/azure-sdk-for-python/tree/4559e19e2f3146a49f1eba1706bb798071f4a1f5/sdk/digitaltwins/azure-digitaltwins-core)\n", + "\n", + "[Here is the doc on the query language](https://docs.microsoft.com/en-us/azure/digital-twins/concepts-query-language)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 1, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from azure.identity import AzureCliCredential\n", + "from azure.digitaltwins.core import DigitalTwinsClient\n", + "\n", + "# using yaml instead of \n", + "import yaml\n", + "import uuid\n", + "\n", + "# using altair instead of matplotlib for vizuals\n", + "import numpy as np\n", + "import pandas as pd\n", + "\n", + "# you will get this from the ADT resource at portal.azure.com\n", + "your_digital_twin_url = \"home-test-twin.api.wcus.digitaltwins.azure.net\"\n", + "\n", + "azure_cli = AzureCliCredential()\n", + "service_client = DigitalTwinsClient(\n", + " your_digital_twin_url, azure_cli)\n", + "service_client" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "I'm going to set up a generic function that runs queries and gets the data. This will keep me from doing it over and over. \n", + "\n", + "**Note that with really large models this might perform poorly** I'm only doing this here as this example is very small. " + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
$dtId$etagsatisfactiontotalWaitTime$metadataevent_titlestateticket_locationcapacitystatus
0customer-cc04f3b6-39b0-4cef-bfff-a7d668cce446W/\"158b01df-d555-4c1b-bf44-9c1e8c63eb02\"10.010.0{'$model': 'dtmi:billmanh:patron;1', 'satisfac...NaNNaNNaNNaNNaN
1customer-3cbd5e60-957d-44ff-944f-9adb42a20a52W/\"5d7359d5-e0f6-44e7-80f8-2a3f191295f2\"10.010.0{'$model': 'dtmi:billmanh:patron;1', 'satisfac...NaNNaNNaNNaNNaN
2customer-5c454e2f-f70b-4352-b75a-958f1a49bebaW/\"d7a8af67-7b40-4ed8-94c4-553fb639e197\"7.00.0{'$model': 'dtmi:billmanh:patron;1', 'satisfac...NaNNaNNaNNaNNaN
3customer-26196fee-5ffd-457a-86b7-192a998f3cf2W/\"ee1d4d68-aa14-4d35-a029-7635675f24d7\"9.00.0{'$model': 'dtmi:billmanh:patron;1', 'satisfac...NaNNaNNaNNaNNaN
4customer-e6f49d8a-711b-41c3-9db8-c7ece3dbc32cW/\"d1242800-1c21-4bab-909f-8dc49ef0ce92\"7.00.0{'$model': 'dtmi:billmanh:patron;1', 'satisfac...NaNNaNNaNNaNNaN
.................................
117ticket-dc48bc69-b1f5-4850-b5af-037c4ea72961W/\"5ea29c9e-a6be-4c02-979a-df1b2aa8cb5c\"NaNNaN{'$model': 'dtmi:billmanh:ticket;1', 'event_ti...Smashing Pumpkinsopen4NaNNaN
118ticket-92826a40-b024-4b91-a4d2-7e20dcfddce3W/\"002990f3-4ed8-464c-a90e-4b83ae5362fb\"NaNNaN{'$model': 'dtmi:billmanh:ticket;1', 'event_ti...Foo Fightersopen1NaNNaN
119ticket-8c483b5c-37d4-4027-8d67-d920bf8ca063W/\"22d419c0-a6da-4935-82e3-5b1f7236908e\"NaNNaN{'$model': 'dtmi:billmanh:ticket;1', 'event_ti...Foo Fightersopen2NaNNaN
120ticket-09028447-8e23-4e4f-a100-098a4dc8e919W/\"39cc602c-a41e-47bf-8da9-1a401419832a\"NaNNaN{'$model': 'dtmi:billmanh:ticket;1', 'event_ti...Foo Fightersopen3NaNNaN
121ticket-ccc89397-0d52-42a3-8fe1-39c97158c87cW/\"094b5fa9-4443-4923-b8f9-24201fb2f26f\"NaNNaN{'$model': 'dtmi:billmanh:ticket;1', 'event_ti...Foo Fightersopen4NaNNaN
\n", + "

122 rows × 10 columns

\n", + "
" + ], + "text/plain": [ + " $dtId \\\n", + "0 customer-cc04f3b6-39b0-4cef-bfff-a7d668cce446 \n", + "1 customer-3cbd5e60-957d-44ff-944f-9adb42a20a52 \n", + "2 customer-5c454e2f-f70b-4352-b75a-958f1a49beba \n", + "3 customer-26196fee-5ffd-457a-86b7-192a998f3cf2 \n", + "4 customer-e6f49d8a-711b-41c3-9db8-c7ece3dbc32c \n", + ".. ... \n", + "117 ticket-dc48bc69-b1f5-4850-b5af-037c4ea72961 \n", + "118 ticket-92826a40-b024-4b91-a4d2-7e20dcfddce3 \n", + "119 ticket-8c483b5c-37d4-4027-8d67-d920bf8ca063 \n", + "120 ticket-09028447-8e23-4e4f-a100-098a4dc8e919 \n", + "121 ticket-ccc89397-0d52-42a3-8fe1-39c97158c87c \n", + "\n", + " $etag satisfaction totalWaitTime \\\n", + "0 W/\"158b01df-d555-4c1b-bf44-9c1e8c63eb02\" 10.0 10.0 \n", + "1 W/\"5d7359d5-e0f6-44e7-80f8-2a3f191295f2\" 10.0 10.0 \n", + "2 W/\"d7a8af67-7b40-4ed8-94c4-553fb639e197\" 7.0 0.0 \n", + "3 W/\"ee1d4d68-aa14-4d35-a029-7635675f24d7\" 9.0 0.0 \n", + "4 W/\"d1242800-1c21-4bab-909f-8dc49ef0ce92\" 7.0 0.0 \n", + ".. ... ... ... \n", + "117 W/\"5ea29c9e-a6be-4c02-979a-df1b2aa8cb5c\" NaN NaN \n", + "118 W/\"002990f3-4ed8-464c-a90e-4b83ae5362fb\" NaN NaN \n", + "119 W/\"22d419c0-a6da-4935-82e3-5b1f7236908e\" NaN NaN \n", + "120 W/\"39cc602c-a41e-47bf-8da9-1a401419832a\" NaN NaN \n", + "121 W/\"094b5fa9-4443-4923-b8f9-24201fb2f26f\" NaN NaN \n", + "\n", + " $metadata event_title \\\n", + "0 {'$model': 'dtmi:billmanh:patron;1', 'satisfac... NaN \n", + "1 {'$model': 'dtmi:billmanh:patron;1', 'satisfac... NaN \n", + "2 {'$model': 'dtmi:billmanh:patron;1', 'satisfac... NaN \n", + "3 {'$model': 'dtmi:billmanh:patron;1', 'satisfac... NaN \n", + "4 {'$model': 'dtmi:billmanh:patron;1', 'satisfac... NaN \n", + ".. ... ... \n", + "117 {'$model': 'dtmi:billmanh:ticket;1', 'event_ti... Smashing Pumpkins \n", + "118 {'$model': 'dtmi:billmanh:ticket;1', 'event_ti... Foo Fighters \n", + "119 {'$model': 'dtmi:billmanh:ticket;1', 'event_ti... Foo Fighters \n", + "120 {'$model': 'dtmi:billmanh:ticket;1', 'event_ti... Foo Fighters \n", + "121 {'$model': 'dtmi:billmanh:ticket;1', 'event_ti... Foo Fighters \n", + "\n", + " state ticket_location capacity status \n", + "0 NaN NaN NaN NaN \n", + "1 NaN NaN NaN NaN \n", + "2 NaN NaN NaN NaN \n", + "3 NaN NaN NaN NaN \n", + "4 NaN NaN NaN NaN \n", + ".. ... ... ... ... \n", + "117 open 4 NaN NaN \n", + "118 open 1 NaN NaN \n", + "119 open 2 NaN NaN \n", + "120 open 3 NaN NaN \n", + "121 open 4 NaN NaN \n", + "\n", + "[122 rows x 10 columns]" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def query_ADT(query):\n", + " query_result = service_client.query_twins(query)\n", + " values = [i for i in query_result]\n", + " return values\n", + "\n", + "def query_to_df(query):\n", + " query_result = query_ADT(query)\n", + " values = pd.DataFrame(query_result)\n", + " return values\n", + "\n", + "query_expression = \"SELECT * FROM digitaltwins\"\n", + "query_to_df(query_expression)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Note that the larger query will give you back all of the values, so you can pop it into a dataframe and filter on the `$metadata` to get the values you want" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## IS_OF_MODEL\n", + "The process for most analysis is to query the items that are relevant into a dataframe and do your analysis on them. 
" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "count 84.000000\n", + "mean 7.142857\n", + "std 1.529965\n", + "min 5.000000\n", + "25% 6.000000\n", + "50% 7.000000\n", + "75% 8.250000\n", + "max 10.000000\n", + "Name: satisfaction, dtype: float64" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "query_expression = \"SELECT * FROM digitaltwins where IS_OF_MODEL('dtmi:mymodels:patron;1')\"\n", + "customers = query_to_df(query_expression)\n", + "customers.satisfaction.describe()" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
" + ], + "text/plain": [ + " T \\\n", + "0 {'$dtId': 'customer-cc04f3b6-39b0-4cef-bfff-a7... \n", + "1 {'$dtId': 'customer-25e19268-3433-4f09-afe3-94... \n", + "2 {'$dtId': 'customer-c87adbfa-1c6e-4ea9-9f03-83... \n", + "\n", + " CT \n", + "0 {'$dtId': 'line-2', '$etag': 'W/\"dd64cd24-6ec2... \n", + "1 {'$dtId': 'line-2', '$etag': 'W/\"dd64cd24-6ec2... \n", + "2 {'$dtId': 'line-2', '$etag': 'W/\"dd64cd24-6ec2... " + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "query_expression = \"\"\"\n", + "SELECT T, CT\n", + "FROM DIGITALTWINS T\n", + "JOIN CT RELATED T.locatedIn\n", + "WHERE CT.$dtId = 'line-2'\n", + "\"\"\"\n", + "\n", + "customers_in_area_2 = query_to_df(query_expression)\n", + "customers_in_area_2" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "OK, let's unpack that:\n", + "\n", + "`SELECT T, CT` - gives short names to the entities in the query: `T` refers to all twins, and `CT` refers to a related item. The related item comes out in the second column.\n", + "\n", + "`RELATED T.locatedIn` - gets all of the elements that have a `locatedIn` relationship and stores them in `CT`. \n", + "\n", + "`WHERE CT.$dtId = 'line-2'` - limits the query to items that have that relationship with `line-2`. This is the filter part. \n", + "\n", + "**Note:** it seems that all _joined queries_ require a specific twin by name. \n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "T {'$dtId': 'customer-cc04f3b6-39b0-4cef-bfff-a7...\n", + "CT {'$dtId': 'line-2', '$etag': 'W/\"dd64cd24-6ec2...\n", + "Name: 0, dtype: object" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "customers_in_area_2.loc[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'$dtId': 'line-2',\n", + " '$etag': 'W/\"dd64cd24-6ec2-452d-b17a-83ebe4bcd73a\"',\n", + " 'capacity': 3,\n", + " '$metadata': {'$model': 'dtmi:billmanh:line;1',\n", + " 'capacity': {'lastUpdateTime': '2020-11-30T00:10:31.5945931Z'}}}" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "customers_in_area_2.loc[0,'CT']" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "So let's look at the customers in `line-2`" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'$dtId': 'customer-cc04f3b6-39b0-4cef-bfff-a7d668cce446',\n", + " '$etag': 'W/\"158b01df-d555-4c1b-bf44-9c1e8c63eb02\"',\n", + " 'satisfaction': 10,\n", + " 'totalWaitTime': 10,\n", + " '$metadata': {'$model': 'dtmi:billmanh:patron;1',\n", + " 'satisfaction': {'lastUpdateTime': '2020-11-19T02:17:30.1401999Z'},\n", + " 'totalWaitTime': {'lastUpdateTime': '2020-11-19T02:17:30.1401999Z'}}}" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "customers_in_area_2.loc[0,'T']" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
" + ], + "text/plain": [ + " $dtId \\\n", + "0 customer-cc04f3b6-39b0-4cef-bfff-a7d668cce446 \n", + "1 customer-25e19268-3433-4f09-afe3-94f466313368 \n", + "2 customer-c87adbfa-1c6e-4ea9-9f03-83e3877ef5fc \n", + "\n", + " $etag satisfaction totalWaitTime \\\n", + "0 W/\"158b01df-d555-4c1b-bf44-9c1e8c63eb02\" 10 10 \n", + "1 W/\"91509c0d-7dc9-465b-a735-2fde2f67fe02\" 10 10 \n", + "2 W/\"44f9fd9b-141f-4603-8a0b-ad8a3730ed0d\" 8 0 \n", + "\n", + " $metadata \n", + "0 {'$model': 'dtmi:billmanh:patron;1', 'satisfac... \n", + "1 {'$model': 'dtmi:billmanh:patron;1', 'satisfac... \n", + "2 {'$model': 'dtmi:billmanh:patron;1', 'satisfac... " + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "l2_cust = pd.DataFrame(customers_in_area_2['T'].tolist())\n", + "l2_cust" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "count 3.000000\n", + "mean 9.333333\n", + "std 1.154701\n", + "min 8.000000\n", + "25% 9.000000\n", + "50% 10.000000\n", + "75% 10.000000\n", + "max 10.000000\n", + "Name: satisfaction, dtype: float64" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "l2_cust.satisfaction.describe()" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "count 84.000000\n", + "mean 7.142857\n", + "std 1.529965\n", + "min 5.000000\n", + "25% 6.000000\n", + "50% 7.000000\n", + "75% 8.250000\n", + "max 10.000000\n", + "Name: satisfaction, dtype: float64" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "customers.satisfaction.describe()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Customers in line 2 have higher satisfaction than customers in general. " + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## How many people in each line" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
" + ], + "text/plain": [ + " COUNT\n", + "0 5" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "query = \"\"\"\n", + "SELECT COUNT() \n", + "FROM DIGITALTWINS T \n", + "JOIN CT RELATED T.locatedIn \n", + "WHERE CT.$dtId IN ['line-0','line-1','line-2', 'line-3']\n", + "\"\"\"\n", + "\n", + "customers_in_lines = query_to_df(query)\n", + "customers_in_lines" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
" + ], + "text/plain": [ + " COUNT\n", + "0      3" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "query = \"\"\"\n", + "SELECT COUNT() \n", + "FROM DIGITALTWINS T \n", + "JOIN CT RELATED T.locatedIn \n", + "WHERE CT.$dtId IN ['line-2']\n", + "\"\"\"\n", + "\n", + "customers_in_lines = query_to_df(query)\n", + "customers_in_lines" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The rough part is that you can only get one count back, not a count per line, like you could with proper SQL. You also have to hard-code all of your `$dtId` values, since the query only accepts literals.\n", + "\n", + "Here is the way around that:\n", + "\n", + "`SELECT line, customer` <- selects the columns that you want the query to return\n", + "\n", + "`AND IS_OF_MODEL(customer, 'dtmi:mymodels:patron;1')` <- specifies that the `customer` twins are of the `patron` model. \n", + "\n", + "You still have to hard-code the names of the lines (or rooms, or whatever), but it returns all of the customers in all of the lines. " + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
" + ], + "text/plain": [ + " line \\\n", + "0 {'$dtId': 'line-1', '$etag': 'W/\"aa93fee2-cba0... \n", + "1 {'$dtId': 'line-1', '$etag': 'W/\"aa93fee2-cba0... \n", + "2 {'$dtId': 'line-2', '$etag': 'W/\"dd64cd24-6ec2... \n", + "3 {'$dtId': 'line-2', '$etag': 'W/\"dd64cd24-6ec2... \n", + "4 {'$dtId': 'line-2', '$etag': 'W/\"dd64cd24-6ec2... \n", + "\n", + " customer \n", + "0 {'$dtId': 'customer-e6f49d8a-711b-41c3-9db8-c7... \n", + "1 {'$dtId': 'customer-21e17d28-76c3-4c04-8df9-39... \n", + "2 {'$dtId': 'customer-cc04f3b6-39b0-4cef-bfff-a7... \n", + "3 {'$dtId': 'customer-25e19268-3433-4f09-afe3-94... \n", + "4 {'$dtId': 'customer-c87adbfa-1c6e-4ea9-9f03-83... " + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "query = \"\"\"\n", + "SELECT line, customer\n", + "FROM DIGITALTWINS customer\n", + "JOIN line RELATED customer.locatedIn\n", + "WHERE line.$dtId IN ['line-0','line-1','line-2', 'line-3']\n", + "AND IS_OF_MODEL(customer, 'dtmi:mymodels:patron;1')\n", + "\"\"\"\n", + "\n", + "customers_in_lines = query_to_df(query)\n", + "customers_in_lines" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Easy enough to munge it into a dataframe:" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
" + ], + "text/plain": [ + " line $etag capacity \\\n", + "0 line-1 W/\"aa93fee2-cba0-4bd4-a020-114f058b0f01\" 3 \n", + "1 line-1 W/\"aa93fee2-cba0-4bd4-a020-114f058b0f01\" 3 \n", + "2 line-2 W/\"dd64cd24-6ec2-452d-b17a-83ebe4bcd73a\" 3 \n", + "3 line-2 W/\"dd64cd24-6ec2-452d-b17a-83ebe4bcd73a\" 3 \n", + "4 line-2 W/\"dd64cd24-6ec2-452d-b17a-83ebe4bcd73a\" 3 \n", + "\n", + " $metadata \\\n", + "0 {'$model': 'dtmi:billmanh:line;1', 'capacity':... \n", + "1 {'$model': 'dtmi:billmanh:line;1', 'capacity':... \n", + "2 {'$model': 'dtmi:billmanh:line;1', 'capacity':... \n", + "3 {'$model': 'dtmi:billmanh:line;1', 'capacity':... \n", + "4 {'$model': 'dtmi:billmanh:line;1', 'capacity':... \n", + "\n", + " customer \\\n", + "0 customer-e6f49d8a-711b-41c3-9db8-c7ece3dbc32c \n", + "1 customer-21e17d28-76c3-4c04-8df9-396703692a68 \n", + "2 customer-cc04f3b6-39b0-4cef-bfff-a7d668cce446 \n", + "3 customer-25e19268-3433-4f09-afe3-94f466313368 \n", + "4 customer-c87adbfa-1c6e-4ea9-9f03-83e3877ef5fc \n", + "\n", + " $etag satisfaction totalWaitTime \\\n", + "0 W/\"d1242800-1c21-4bab-909f-8dc49ef0ce92\" 7 0 \n", + "1 W/\"c62357a9-f4e5-4ec6-9d60-08b0da1125a7\" 8 0 \n", + "2 W/\"158b01df-d555-4c1b-bf44-9c1e8c63eb02\" 10 10 \n", + "3 W/\"91509c0d-7dc9-465b-a735-2fde2f67fe02\" 10 10 \n", + "4 W/\"44f9fd9b-141f-4603-8a0b-ad8a3730ed0d\" 8 0 \n", + "\n", + " $metadata \n", + "0 {'$model': 'dtmi:billmanh:patron;1', 'satisfac... \n", + "1 {'$model': 'dtmi:billmanh:patron;1', 'satisfac... \n", + "2 {'$model': 'dtmi:billmanh:patron;1', 'satisfac... \n", + "3 {'$model': 'dtmi:billmanh:patron;1', 'satisfac... \n", + "4 {'$model': 'dtmi:billmanh:patron;1', 'satisfac... " + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "c_in_line = pd.concat(\n", + " [pd.DataFrame(customers_in_lines['line'].tolist()),\n", + " pd.DataFrame(customers_in_lines['customer'].tolist())],\n", + " axis=1\n", + ")\n", + "\n", + "cols = c_in_line.columns.tolist()\n", + "cols[0] = 'line'\n", + "cols[4] = 'customer'\n", + "c_in_line.columns = cols\n", + "c_in_line" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "How many people are in each line: " + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "line\n", + "line-1 2\n", + "line-2 3\n", + "Name: customer, dtype: int64" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "c_in_line.groupby('line').count()['customer']" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Which group of people has the highest satisfaction?" 
+ ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "line\n", + "line-1 7.500000\n", + "line-2 9.333333\n", + "Name: satisfaction, dtype: float64" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "c_in_line.groupby('line').mean()['satisfaction']" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python (azure_test)", + "language": "python", + "name": "azure_test" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.9" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} \ No newline at end of file diff --git a/sdk/digitaltwins/azure-digitaltwins-core/samples/notebooks/models/Patron.json b/sdk/digitaltwins/azure-digitaltwins-core/samples/notebooks/models/Patron.json new file mode 100644 index 000000000000..7a34a5945d00 --- /dev/null +++ b/sdk/digitaltwins/azure-digitaltwins-core/samples/notebooks/models/Patron.json @@ -0,0 +1,43 @@ +{ + "@id": "dtmi:mymodels:patron;1", + "@type": "Interface", + "displayName": "Patron", + "contents": [ + { + "@type": "Command", + "name": "order", + "request": { + "name": "wallet", + "displayName": "available money", + "description": "how much the patron is willing to spend", + "schema": "double" + }, + "response": { + "name": "decision", + "displayName": "consumer decision", + "schema": "string" + } + }, + { + "@type": "Property", + "name": "totalWaitTime", + "displayName": "Total Wait Time", + "schema": "double", + "comment": "time in seconds that the customer is required to wait" + }, + { + "@type": "Property", + "name": "satisfaction", + "displayName": "Customer Satisfaction", + "schema": "integer", + "comment": "1-10 scale of how likely to recommend to a friend (NPS)" + }, + { + "@type": "Relationship", + "name": "locatedIn" + } + ], + "@context": "dtmi:dtdl:context;2", + "comment": "This should represent one restaurant visitor.", + "description": "As an example, contains all of the properties possible in the DTDL." +} \ No newline at end of file diff --git a/sdk/digitaltwins/azure-digitaltwins-core/samples/notebooks/models/area.json b/sdk/digitaltwins/azure-digitaltwins-core/samples/notebooks/models/area.json new file mode 100644 index 000000000000..90f84d04aa35 --- /dev/null +++ b/sdk/digitaltwins/azure-digitaltwins-core/samples/notebooks/models/area.json @@ -0,0 +1,28 @@ +{ + "@id": "dtmi:mymodels:area;1", + "@type": "Interface", + "displayName": "area", + "contents": [ + { + "@type": "Property", + "name": "capacity", + "displayName": "capacity", + "schema": "double", + "comment": "How many people can safely be in this area." + }, + { + "@type": "Property", + "name": "status", + "displayName": "status", + "schema": "string", + "comment": "open, closed, etc." 
+ }, + { + "@type": "Relationship", + "name": "occupiedBy" + } + ], + "@context": "dtmi:dtdl:context;2", + "comment": "A number of people in a room", + "description": "a room" +} \ No newline at end of file diff --git a/sdk/digitaltwins/azure-digitaltwins-core/samples/notebooks/models/line.json b/sdk/digitaltwins/azure-digitaltwins-core/samples/notebooks/models/line.json new file mode 100644 index 000000000000..20004091a117 --- /dev/null +++ b/sdk/digitaltwins/azure-digitaltwins-core/samples/notebooks/models/line.json @@ -0,0 +1,35 @@ +{ + "@id": "dtmi:mymodels:line;1", + "@type": "Interface", + "displayName": "line", + "contents": [ + { + "@type": "Property", + "name": "capacity", + "displayName": "capacity", + "schema": "double", + "comment": "How many people can safely wait in this line." + }, + { + "@type": "Property", + "name": "status", + "displayName": "status", + "schema": "string", + "comment": "current status of the line" + }, + { + "@type": "Relationship", + "name": "leadsTo", + "properties": [ + { + "@type": "Property", + "name": "walk_distance", + "schema": "double" + } + ] + } + ], + "@context": "dtmi:dtdl:context;2", + "comment": "lines can hold a number of people.", + "description": "a group of people waiting to get into a place." +} \ No newline at end of file diff --git a/sdk/digitaltwins/azure-digitaltwins-core/samples/notebooks/models/ticket.json b/sdk/digitaltwins/azure-digitaltwins-core/samples/notebooks/models/ticket.json new file mode 100644 index 000000000000..94f4ef3d8a01 --- /dev/null +++ b/sdk/digitaltwins/azure-digitaltwins-core/samples/notebooks/models/ticket.json @@ -0,0 +1,63 @@ +{ + "@id": "dtmi:mymodels:ticket;1", + "@type": "Interface", + "displayName": "ticket", + "contents": [ + { + "@type": "Property", + "name": "purchaselocation_lat", + "displayName": "puchased location (latitude)", + "schema": "double", + "comment": "latitude of the place where the ticket was purchased" + }, + { + "@type": "Property", + "name": "purchaselocation_long", + "displayName": "puchased location", + "schema": "double", + "comment": "longitude of the place where the ticket was purchased" + }, + { + "@type": "Property", + "name": "sold_to", + "displayName": "sold to", + "schema": "string", + "comment": "UID of the purchaser from customer data" + }, + { + "@type": "Property", + "name": "event_title", + "displayName": "event name", + "schema": "string", + "comment": "name or id of the event" + }, + { + "@type": "Property", + "name": "ticket_location", + "displayName": "ticket location", + "schema": "string", + "comment": "location of the seat, for demonstration this is just a number" + }, + { + "@type": "Property", + "name": "state", + "displayName": "state", + "schema": "string", + "comment": "current state of the ticket" + }, + { + "@type": "Relationship", + "name": "ownedBy", + "properties": [ + { + "@type": "Property", + "name": "bought_online", + "schema": "boolean" + } + ] + } + ], + "@context": "dtmi:dtdl:context;2", + "comment": "A number of tickets are placed into ADT before purchase with the initial state: open", + "description": "an abstract ticket" +} \ No newline at end of file diff --git a/sdk/digitaltwins/azure-digitaltwins-core/samples/notebooks/readme.md b/sdk/digitaltwins/azure-digitaltwins-core/samples/notebooks/readme.md new file mode 100644 index 000000000000..160a69239fdd --- /dev/null +++ b/sdk/digitaltwins/azure-digitaltwins-core/samples/notebooks/readme.md @@ -0,0 +1,20 @@ +## Digital Twin Tutorial + +These notebooks will show you how to create a graph 
and interact with it through the Python API. It will build a venue and allow you to measure the customer satisfaction of people as the pass through the system. + + +You will need: +* A conda environment with the latest `Azure-Digital-Twin SDK` +* The Azure CLI tools for authentication +* The digital twin explorer +* A digital twin account, where you have sufficient permission. + + +The perspective of these notebooks is from the role of the **Analyst** not an application. The purpose is to use the graph to get insights on customers. This would allow you to query the API in real time, as events are happening. + +| notebook | purpose | +|----------|:-------------:| +| 01 | The basics of connecting, uploading models and creating twins | +| 02 | More details about creating relationships, updating values and basic queries | +| 03 | Some examples of arbitrary model changes that I made to shape the model | +| 04 | More detail about running queries with some examples of how to get insights | diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_tablestoragecs.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_tablestoragecs.py index d88cb4b417ca..168927875bb3 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_tablestoragecs.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_tablestoragecs.py @@ -7,7 +7,6 @@ import time import logging import calendar -import dateutil.parser from azure.core import MatchConditions from azure.eventhub import CheckpointStore # type: ignore # pylint: disable=no-name-in-module from azure.eventhub.exceptions import OwnershipLostError # type: ignore @@ -18,6 +17,8 @@ ) from ._vendor.data.tables import TableClient, UpdateMode from ._vendor.data.tables._base_client import parse_connection_str +from ._vendor.data.tables._deserialize import clean_up_dotnet_timestamps +from ._vendor.data.tables._common_conversion import TZ_UTC logger = logging.getLogger(__name__) @@ -39,6 +40,19 @@ def _to_timestamp(date): timestamp += date.microsecond / 1e6 return timestamp +def _timestamp_to_datetime(value): + # Cosmos returns this with a decimal point that throws an error on deserialization + cleaned_value = clean_up_dotnet_timestamps(value) + try: + dt_obj = datetime.datetime.strptime(cleaned_value, "%Y-%m-%dT%H:%M:%S.%fZ").replace( + tzinfo=TZ_UTC + ) + except ValueError: + dt_obj = datetime.datetime.strptime(cleaned_value, "%Y-%m-%dT%H:%M:%SZ").replace( + tzinfo=TZ_UTC + ) + return dt_obj + class TableCheckpointStore(CheckpointStore): """A CheckpointStore that uses Azure Table Storage to store the partition ownership and checkpoint data. @@ -113,13 +127,13 @@ def _create_ownership_entity(cls, ownership): Create a dictionary with the `ownership` attributes. """ ownership_entity = { - "PartitionKey": "{} {} {} Ownership".format( + "PartitionKey": u"{} {} {} Ownership".format( ownership["fully_qualified_namespace"], ownership["eventhub_name"], ownership["consumer_group"], ), - "RowKey": ownership["partition_id"], - "ownerid": ownership["owner_id"], + "RowKey": u"{}".format(ownership["partition_id"]), + "ownerid": u"{}".format(ownership["owner_id"]), } return ownership_entity @@ -129,21 +143,21 @@ def _create_checkpoint_entity(cls, checkpoint): Create a dictionary with `checkpoint` attributes. 
""" checkpoint_entity = { - "PartitionKey": "{} {} {} Checkpoint".format( + "PartitionKey": u"{} {} {} Checkpoint".format( checkpoint["fully_qualified_namespace"], checkpoint["eventhub_name"], checkpoint["consumer_group"], ), - "RowKey": checkpoint["partition_id"], - "offset": checkpoint["offset"], - "sequencenumber": checkpoint["sequence_number"], + "RowKey": u"{}".format(checkpoint["partition_id"]), + "offset": u"{}".format(checkpoint["offset"]), + "sequencenumber": u"{}".format(checkpoint["sequence_number"]), } return checkpoint_entity def _update_ownership(self, ownership, **kwargs): """_update_ownership mutates the passed in ownership.""" + ownership_entity = TableCheckpointStore._create_ownership_entity(ownership) try: - ownership_entity = TableCheckpointStore._create_ownership_entity(ownership) metadata = self._table_client.update_entity( mode=UpdateMode.REPLACE, entity=ownership_entity, @@ -166,7 +180,7 @@ def _update_ownership(self, ownership, **kwargs): ) ownership["etag"] = metadata["etag"] ownership["last_modified_time"] = _to_timestamp( - dateutil.parser.isoparse(metadata["content"]["Timestamp"]) + _timestamp_to_datetime(metadata["content"]["Timestamp"]) ) def _claim_one_partition(self, ownership, **kwargs): @@ -289,7 +303,7 @@ def list_checkpoints( "eventhub_name": eventhub_name, "consumer_group": consumer_group, "partition_id": entity[u"RowKey"], - "sequence_number": entity[u"sequencenumber"], + "sequence_number": int(entity[u"sequencenumber"]), "offset": str(entity[u"offset"]), } checkpoints_list.append(checkpoint) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/tests/test_storage_table_partition_manager.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/tests/test_storage_table_partition_manager.py index 33e60c134464..c19b0cb6ffc4 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoretable/tests/test_storage_table_partition_manager.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/tests/test_storage_table_partition_manager.py @@ -13,14 +13,15 @@ from azure.eventhub.extensions.checkpointstoretable import TableCheckpointStore from azure.eventhub.exceptions import OwnershipLostError -STORAGE_CONN_STR = [ - #os.environ.get("AZURE_STORAGE_CONN_STR", "Azure Storage Connection String"), - os.environ.get("AZURE_COSMOS_CONN_STR", "Azure Storage Connection String"), +STORAGE_ENV_KEYS = [ + "AZURE_TABLES_CONN_STR", + "AZURE_COSMOS_CONN_STR" ] -def get_live_storage_table_client(storage_connection_str): +def get_live_storage_table_client(conn_str_env_key): try: + storage_connection_str = os.environ[conn_str_env_key] table_name = "table{}".format(uuid.uuid4().hex) table_service_client = TableServiceClient.from_connection_string( storage_connection_str @@ -176,11 +177,11 @@ def _update_and_list_checkpoint(storage_connection_str, table_name): assert checkpoint_list[0]["offset"] == "30" -@pytest.mark.parametrize("storage_connection_str", STORAGE_CONN_STR) -@pytest.mark.skip("update after adding conn str env var") -def test_claim_ownership_exception(storage_connection_str): +@pytest.mark.parametrize("conn_str_env_key", STORAGE_ENV_KEYS) +@pytest.mark.liveTest +def test_claim_ownership_exception(conn_str_env_key): storage_connection_str, table_name = get_live_storage_table_client( - storage_connection_str + conn_str_env_key ) try: _claim_ownership_exception_test(storage_connection_str, table_name) @@ -188,11 +189,11 @@ def test_claim_ownership_exception(storage_connection_str): remove_live_storage_table_client(storage_connection_str, table_name) 
-@pytest.mark.parametrize("storage_connection_str", STORAGE_CONN_STR) -@pytest.mark.skip("update after adding conn str env var") -def test_claim_and_list_ownership(storage_connection_str): +@pytest.mark.parametrize("conn_str_env_key", STORAGE_ENV_KEYS) +@pytest.mark.liveTest +def test_claim_and_list_ownership(conn_str_env_key): storage_connection_str, table_name = get_live_storage_table_client( - storage_connection_str + conn_str_env_key ) try: _claim_and_list_ownership(storage_connection_str, table_name) @@ -200,11 +201,11 @@ def test_claim_and_list_ownership(storage_connection_str): remove_live_storage_table_client(storage_connection_str, table_name) -@pytest.mark.parametrize("storage_connection_str", STORAGE_CONN_STR) -@pytest.mark.skip("update after adding conn str env var") -def test_update_checkpoint(storage_connection_str): +@pytest.mark.parametrize("conn_str_env_key", STORAGE_ENV_KEYS) +@pytest.mark.liveTest +def test_update_checkpoint(conn_str_env_key): storage_connection_str, table_name = get_live_storage_table_client( - storage_connection_str + conn_str_env_key ) try: _update_and_list_checkpoint(storage_connection_str, table_name) diff --git a/sdk/eventhub/azure-eventhub/tests/livetest/asynctests/test_consumer_client_async.py b/sdk/eventhub/azure-eventhub/tests/livetest/asynctests/test_consumer_client_async.py index 41c2a950b629..0ff6ae711cbf 100644 --- a/sdk/eventhub/azure-eventhub/tests/livetest/asynctests/test_consumer_client_async.py +++ b/sdk/eventhub/azure-eventhub/tests/livetest/asynctests/test_consumer_client_async.py @@ -16,7 +16,7 @@ async def test_receive_no_partition_async(connstr_senders): async def on_event(partition_context, event): on_event.received += 1 - await partition_context.update_checkpoint(event, fake_kwarg="arg") # ignores fake_kwarg + await partition_context.update_checkpoint(event) on_event.namespace = partition_context.fully_qualified_namespace on_event.eventhub_name = partition_context.eventhub_name on_event.consumer_group = partition_context.consumer_group diff --git a/sdk/eventhub/azure-eventhub/tests/livetest/asynctests/test_receive_async.py b/sdk/eventhub/azure-eventhub/tests/livetest/asynctests/test_receive_async.py index cd7de4f46037..6fb153052023 100644 --- a/sdk/eventhub/azure-eventhub/tests/livetest/asynctests/test_receive_async.py +++ b/sdk/eventhub/azure-eventhub/tests/livetest/asynctests/test_receive_async.py @@ -10,7 +10,7 @@ from azure.eventhub import EventData, TransportType from azure.eventhub.exceptions import EventHubError -from azure.eventhub.aio import EventHubConsumerClient +from azure.eventhub.aio import EventHubProducerClient, EventHubConsumerClient @pytest.mark.liveTest @@ -30,16 +30,27 @@ async def on_event(partition_context, event): on_event.called = False connection_str, senders = connstr_senders + # test async producer client + producer_client = EventHubProducerClient.from_connection_string(connection_str) + partitions = await producer_client.get_partition_ids() + senders = [] + for p in partitions: + sender = producer_client._create_producer(partition_id=p) + senders.append(sender) + client = EventHubConsumerClient.from_connection_string(connection_str, consumer_group='$default') async with client: task = asyncio.ensure_future(client.receive(on_event, partition_id="0", starting_position="@latest")) await asyncio.sleep(10) assert on_event.called is False - senders[0].send(EventData(b"Receiving only a single event"), partition_key='0') + await senders[0].send(EventData(b"Receiving only a single event"), partition_key='0') 
await asyncio.sleep(10) assert on_event.called is True await task + for s in senders: + await s.close() + await producer_client.close() @pytest.mark.parametrize("position, inclusive, expected_result", diff --git a/sdk/eventhub/azure-eventhub/tests/livetest/synctests/test_consumer_client.py b/sdk/eventhub/azure-eventhub/tests/livetest/synctests/test_consumer_client.py index 5a2dca789f37..8da5ddeb6ead 100644 --- a/sdk/eventhub/azure-eventhub/tests/livetest/synctests/test_consumer_client.py +++ b/sdk/eventhub/azure-eventhub/tests/livetest/synctests/test_consumer_client.py @@ -17,7 +17,7 @@ def test_receive_no_partition(connstr_senders): def on_event(partition_context, event): on_event.received += 1 - partition_context.update_checkpoint(event, fake_kwarg="arg") # ignores fake_kwarg + partition_context.update_checkpoint(event) on_event.namespace = partition_context.fully_qualified_namespace on_event.eventhub_name = partition_context.eventhub_name on_event.consumer_group = partition_context.consumer_group diff --git a/sdk/eventhub/azure-mgmt-eventhub/CHANGELOG.md b/sdk/eventhub/azure-mgmt-eventhub/CHANGELOG.md index 331bb73192fa..dceaea924cd1 100644 --- a/sdk/eventhub/azure-mgmt-eventhub/CHANGELOG.md +++ b/sdk/eventhub/azure-mgmt-eventhub/CHANGELOG.md @@ -1,5 +1,15 @@ # Release History +## 9.1.0 (2021-09-17) + +**Features** + + - Model Cluster has a new parameter system_data + - Model EHNamespace has a new parameter disable_local_auth + - Model EHNamespace has a new parameter status + - Model NetworkRuleSet has a new parameter public_network_access + - Added operation ClustersOperations.list_by_subscription + ## 9.0.0 (2021-05-25) **Features** diff --git a/sdk/eventhub/azure-mgmt-eventhub/_meta.json b/sdk/eventhub/azure-mgmt-eventhub/_meta.json index 720230c56131..76188907d20b 100644 --- a/sdk/eventhub/azure-mgmt-eventhub/_meta.json +++ b/sdk/eventhub/azure-mgmt-eventhub/_meta.json @@ -1,11 +1,11 @@ { - "autorest": "3.4.2", + "autorest": "3.4.5", "use": [ - "@autorest/python@5.8.0", - "@autorest/modelerfour@4.19.1" + "@autorest/python@5.8.4", + "@autorest/modelerfour@4.19.2" ], - "commit": "243dc16d38ca0f0d251efa2d216468a670cd8beb", + "commit": "c9992af7235a6550087d4fed8f081ed35019f605", "repository_url": "https://github.com/Azure/azure-rest-api-specs", - "autorest_command": "autorest specification/eventhub/resource-manager/readme.md --multiapi --python --python-mode=update --python-sdks-folder=/home/vsts/work/1/s/azure-sdk-for-python/sdk --track2 --use=@autorest/python@5.8.0 --use=@autorest/modelerfour@4.19.1 --version=3.4.2", + "autorest_command": "autorest specification/eventhub/resource-manager/readme.md --multiapi --python --python-mode=update --python-sdks-folder=/home/vsts/work/1/s/azure-sdk-for-python/sdk --track2 --use=@autorest/python@5.8.4 --use=@autorest/modelerfour@4.19.2 --version=3.4.5", "readme": "specification/eventhub/resource-manager/readme.md" } \ No newline at end of file diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/_event_hub_management_client.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/_event_hub_management_client.py index 8d43eb07f8b5..54186c7530a8 100644 --- a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/_event_hub_management_client.py +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/_event_hub_management_client.py @@ -95,6 +95,7 @@ def models(cls, api_version=DEFAULT_API_VERSION): * 2017-04-01: :mod:`v2017_04_01.models` * 2018-01-01-preview: :mod:`v2018_01_01_preview.models` * 2021-01-01-preview: 
:mod:`v2021_01_01_preview.models` + * 2021-06-01-preview: :mod:`v2021_06_01_preview.models` """ if api_version == '2015-08-01': from .v2015_08_01 import models @@ -108,6 +109,9 @@ def models(cls, api_version=DEFAULT_API_VERSION): elif api_version == '2021-01-01-preview': from .v2021_01_01_preview import models return models + elif api_version == '2021-06-01-preview': + from .v2021_06_01_preview import models + return models raise ValueError("API version {} is not available".format(api_version)) @property @@ -115,10 +119,13 @@ def clusters(self): """Instance depends on the API version: * 2018-01-01-preview: :class:`ClustersOperations` + * 2021-06-01-preview: :class:`ClustersOperations` """ api_version = self._get_api_version('clusters') if api_version == '2018-01-01-preview': from .v2018_01_01_preview.operations import ClustersOperations as OperationClass + elif api_version == '2021-06-01-preview': + from .v2021_06_01_preview.operations import ClustersOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'clusters'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -128,10 +135,13 @@ def configuration(self): """Instance depends on the API version: * 2018-01-01-preview: :class:`ConfigurationOperations` + * 2021-06-01-preview: :class:`ConfigurationOperations` """ api_version = self._get_api_version('configuration') if api_version == '2018-01-01-preview': from .v2018_01_01_preview.operations import ConfigurationOperations as OperationClass + elif api_version == '2021-06-01-preview': + from .v2021_06_01_preview.operations import ConfigurationOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'configuration'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -144,6 +154,7 @@ def consumer_groups(self): * 2017-04-01: :class:`ConsumerGroupsOperations` * 2018-01-01-preview: :class:`ConsumerGroupsOperations` * 2021-01-01-preview: :class:`ConsumerGroupsOperations` + * 2021-06-01-preview: :class:`ConsumerGroupsOperations` """ api_version = self._get_api_version('consumer_groups') if api_version == '2015-08-01': @@ -154,6 +165,8 @@ def consumer_groups(self): from .v2018_01_01_preview.operations import ConsumerGroupsOperations as OperationClass elif api_version == '2021-01-01-preview': from .v2021_01_01_preview.operations import ConsumerGroupsOperations as OperationClass + elif api_version == '2021-06-01-preview': + from .v2021_06_01_preview.operations import ConsumerGroupsOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'consumer_groups'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -165,6 +178,7 @@ def disaster_recovery_configs(self): * 2017-04-01: :class:`DisasterRecoveryConfigsOperations` * 2018-01-01-preview: :class:`DisasterRecoveryConfigsOperations` * 2021-01-01-preview: :class:`DisasterRecoveryConfigsOperations` + * 2021-06-01-preview: :class:`DisasterRecoveryConfigsOperations` """ api_version = self._get_api_version('disaster_recovery_configs') if api_version == '2017-04-01': @@ -173,6 +187,8 @@ def disaster_recovery_configs(self): from .v2018_01_01_preview.operations import DisasterRecoveryConfigsOperations as 
OperationClass elif api_version == '2021-01-01-preview': from .v2021_01_01_preview.operations import DisasterRecoveryConfigsOperations as OperationClass + elif api_version == '2021-06-01-preview': + from .v2021_06_01_preview.operations import DisasterRecoveryConfigsOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'disaster_recovery_configs'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -185,6 +201,7 @@ def event_hubs(self): * 2017-04-01: :class:`EventHubsOperations` * 2018-01-01-preview: :class:`EventHubsOperations` * 2021-01-01-preview: :class:`EventHubsOperations` + * 2021-06-01-preview: :class:`EventHubsOperations` """ api_version = self._get_api_version('event_hubs') if api_version == '2015-08-01': @@ -195,6 +212,8 @@ def event_hubs(self): from .v2018_01_01_preview.operations import EventHubsOperations as OperationClass elif api_version == '2021-01-01-preview': from .v2021_01_01_preview.operations import EventHubsOperations as OperationClass + elif api_version == '2021-06-01-preview': + from .v2021_06_01_preview.operations import EventHubsOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'event_hubs'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -207,6 +226,7 @@ def namespaces(self): * 2017-04-01: :class:`NamespacesOperations` * 2018-01-01-preview: :class:`NamespacesOperations` * 2021-01-01-preview: :class:`NamespacesOperations` + * 2021-06-01-preview: :class:`NamespacesOperations` """ api_version = self._get_api_version('namespaces') if api_version == '2015-08-01': @@ -217,6 +237,8 @@ def namespaces(self): from .v2018_01_01_preview.operations import NamespacesOperations as OperationClass elif api_version == '2021-01-01-preview': from .v2021_01_01_preview.operations import NamespacesOperations as OperationClass + elif api_version == '2021-06-01-preview': + from .v2021_06_01_preview.operations import NamespacesOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'namespaces'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -229,6 +251,7 @@ def operations(self): * 2017-04-01: :class:`Operations` * 2018-01-01-preview: :class:`Operations` * 2021-01-01-preview: :class:`Operations` + * 2021-06-01-preview: :class:`Operations` """ api_version = self._get_api_version('operations') if api_version == '2015-08-01': @@ -239,6 +262,8 @@ def operations(self): from .v2018_01_01_preview.operations import Operations as OperationClass elif api_version == '2021-01-01-preview': from .v2021_01_01_preview.operations import Operations as OperationClass + elif api_version == '2021-06-01-preview': + from .v2021_06_01_preview.operations import Operations as OperationClass else: raise ValueError("API version {} does not have operation group 'operations'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -249,12 +274,15 @@ def private_endpoint_connections(self): * 2018-01-01-preview: :class:`PrivateEndpointConnectionsOperations` * 2021-01-01-preview: :class:`PrivateEndpointConnectionsOperations` + * 
2021-06-01-preview: :class:`PrivateEndpointConnectionsOperations` """ api_version = self._get_api_version('private_endpoint_connections') if api_version == '2018-01-01-preview': from .v2018_01_01_preview.operations import PrivateEndpointConnectionsOperations as OperationClass elif api_version == '2021-01-01-preview': from .v2021_01_01_preview.operations import PrivateEndpointConnectionsOperations as OperationClass + elif api_version == '2021-06-01-preview': + from .v2021_06_01_preview.operations import PrivateEndpointConnectionsOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'private_endpoint_connections'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -265,12 +293,15 @@ def private_link_resources(self): * 2018-01-01-preview: :class:`PrivateLinkResourcesOperations` * 2021-01-01-preview: :class:`PrivateLinkResourcesOperations` + * 2021-06-01-preview: :class:`PrivateLinkResourcesOperations` """ api_version = self._get_api_version('private_link_resources') if api_version == '2018-01-01-preview': from .v2018_01_01_preview.operations import PrivateLinkResourcesOperations as OperationClass elif api_version == '2021-01-01-preview': from .v2021_01_01_preview.operations import PrivateLinkResourcesOperations as OperationClass + elif api_version == '2021-06-01-preview': + from .v2021_06_01_preview.operations import PrivateLinkResourcesOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'private_link_resources'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/_version.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/_version.py index 14d0091047ac..2bcd3a3e386b 100644 --- a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/_version.py +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/_version.py @@ -5,4 +5,4 @@ # license information. 
# -------------------------------------------------------------------------- -VERSION = "9.0.0" +VERSION = "9.1.0" diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/aio/_event_hub_management_client.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/aio/_event_hub_management_client.py index d228ab439d9c..d9c6cc69154b 100644 --- a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/aio/_event_hub_management_client.py +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/aio/_event_hub_management_client.py @@ -93,6 +93,7 @@ def models(cls, api_version=DEFAULT_API_VERSION): * 2017-04-01: :mod:`v2017_04_01.models` * 2018-01-01-preview: :mod:`v2018_01_01_preview.models` * 2021-01-01-preview: :mod:`v2021_01_01_preview.models` + * 2021-06-01-preview: :mod:`v2021_06_01_preview.models` """ if api_version == '2015-08-01': from ..v2015_08_01 import models @@ -106,6 +107,9 @@ def models(cls, api_version=DEFAULT_API_VERSION): elif api_version == '2021-01-01-preview': from ..v2021_01_01_preview import models return models + elif api_version == '2021-06-01-preview': + from ..v2021_06_01_preview import models + return models raise ValueError("API version {} is not available".format(api_version)) @property @@ -113,10 +117,13 @@ def clusters(self): """Instance depends on the API version: * 2018-01-01-preview: :class:`ClustersOperations` + * 2021-06-01-preview: :class:`ClustersOperations` """ api_version = self._get_api_version('clusters') if api_version == '2018-01-01-preview': from ..v2018_01_01_preview.aio.operations import ClustersOperations as OperationClass + elif api_version == '2021-06-01-preview': + from ..v2021_06_01_preview.aio.operations import ClustersOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'clusters'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -126,10 +133,13 @@ def configuration(self): """Instance depends on the API version: * 2018-01-01-preview: :class:`ConfigurationOperations` + * 2021-06-01-preview: :class:`ConfigurationOperations` """ api_version = self._get_api_version('configuration') if api_version == '2018-01-01-preview': from ..v2018_01_01_preview.aio.operations import ConfigurationOperations as OperationClass + elif api_version == '2021-06-01-preview': + from ..v2021_06_01_preview.aio.operations import ConfigurationOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'configuration'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -142,6 +152,7 @@ def consumer_groups(self): * 2017-04-01: :class:`ConsumerGroupsOperations` * 2018-01-01-preview: :class:`ConsumerGroupsOperations` * 2021-01-01-preview: :class:`ConsumerGroupsOperations` + * 2021-06-01-preview: :class:`ConsumerGroupsOperations` """ api_version = self._get_api_version('consumer_groups') if api_version == '2015-08-01': @@ -152,6 +163,8 @@ def consumer_groups(self): from ..v2018_01_01_preview.aio.operations import ConsumerGroupsOperations as OperationClass elif api_version == '2021-01-01-preview': from ..v2021_01_01_preview.aio.operations import ConsumerGroupsOperations as OperationClass + elif api_version == '2021-06-01-preview': + from ..v2021_06_01_preview.aio.operations import ConsumerGroupsOperations as OperationClass else: raise ValueError("API version {} does 
not have operation group 'consumer_groups'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -163,6 +176,7 @@ def disaster_recovery_configs(self): * 2017-04-01: :class:`DisasterRecoveryConfigsOperations` * 2018-01-01-preview: :class:`DisasterRecoveryConfigsOperations` * 2021-01-01-preview: :class:`DisasterRecoveryConfigsOperations` + * 2021-06-01-preview: :class:`DisasterRecoveryConfigsOperations` """ api_version = self._get_api_version('disaster_recovery_configs') if api_version == '2017-04-01': @@ -171,6 +185,8 @@ def disaster_recovery_configs(self): from ..v2018_01_01_preview.aio.operations import DisasterRecoveryConfigsOperations as OperationClass elif api_version == '2021-01-01-preview': from ..v2021_01_01_preview.aio.operations import DisasterRecoveryConfigsOperations as OperationClass + elif api_version == '2021-06-01-preview': + from ..v2021_06_01_preview.aio.operations import DisasterRecoveryConfigsOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'disaster_recovery_configs'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -183,6 +199,7 @@ def event_hubs(self): * 2017-04-01: :class:`EventHubsOperations` * 2018-01-01-preview: :class:`EventHubsOperations` * 2021-01-01-preview: :class:`EventHubsOperations` + * 2021-06-01-preview: :class:`EventHubsOperations` """ api_version = self._get_api_version('event_hubs') if api_version == '2015-08-01': @@ -193,6 +210,8 @@ def event_hubs(self): from ..v2018_01_01_preview.aio.operations import EventHubsOperations as OperationClass elif api_version == '2021-01-01-preview': from ..v2021_01_01_preview.aio.operations import EventHubsOperations as OperationClass + elif api_version == '2021-06-01-preview': + from ..v2021_06_01_preview.aio.operations import EventHubsOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'event_hubs'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -205,6 +224,7 @@ def namespaces(self): * 2017-04-01: :class:`NamespacesOperations` * 2018-01-01-preview: :class:`NamespacesOperations` * 2021-01-01-preview: :class:`NamespacesOperations` + * 2021-06-01-preview: :class:`NamespacesOperations` """ api_version = self._get_api_version('namespaces') if api_version == '2015-08-01': @@ -215,6 +235,8 @@ def namespaces(self): from ..v2018_01_01_preview.aio.operations import NamespacesOperations as OperationClass elif api_version == '2021-01-01-preview': from ..v2021_01_01_preview.aio.operations import NamespacesOperations as OperationClass + elif api_version == '2021-06-01-preview': + from ..v2021_06_01_preview.aio.operations import NamespacesOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'namespaces'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -227,6 +249,7 @@ def operations(self): * 2017-04-01: :class:`Operations` * 2018-01-01-preview: :class:`Operations` * 2021-01-01-preview: :class:`Operations` + * 2021-06-01-preview: :class:`Operations` """ api_version = self._get_api_version('operations') if api_version == '2015-08-01': @@ -237,6 
+260,8 @@ def operations(self): from ..v2018_01_01_preview.aio.operations import Operations as OperationClass elif api_version == '2021-01-01-preview': from ..v2021_01_01_preview.aio.operations import Operations as OperationClass + elif api_version == '2021-06-01-preview': + from ..v2021_06_01_preview.aio.operations import Operations as OperationClass else: raise ValueError("API version {} does not have operation group 'operations'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -247,12 +272,15 @@ def private_endpoint_connections(self): * 2018-01-01-preview: :class:`PrivateEndpointConnectionsOperations` * 2021-01-01-preview: :class:`PrivateEndpointConnectionsOperations` + * 2021-06-01-preview: :class:`PrivateEndpointConnectionsOperations` """ api_version = self._get_api_version('private_endpoint_connections') if api_version == '2018-01-01-preview': from ..v2018_01_01_preview.aio.operations import PrivateEndpointConnectionsOperations as OperationClass elif api_version == '2021-01-01-preview': from ..v2021_01_01_preview.aio.operations import PrivateEndpointConnectionsOperations as OperationClass + elif api_version == '2021-06-01-preview': + from ..v2021_06_01_preview.aio.operations import PrivateEndpointConnectionsOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'private_endpoint_connections'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -263,12 +291,15 @@ def private_link_resources(self): * 2018-01-01-preview: :class:`PrivateLinkResourcesOperations` * 2021-01-01-preview: :class:`PrivateLinkResourcesOperations` + * 2021-06-01-preview: :class:`PrivateLinkResourcesOperations` """ api_version = self._get_api_version('private_link_resources') if api_version == '2018-01-01-preview': from ..v2018_01_01_preview.aio.operations import PrivateLinkResourcesOperations as OperationClass elif api_version == '2021-01-01-preview': from ..v2021_01_01_preview.aio.operations import PrivateLinkResourcesOperations as OperationClass + elif api_version == '2021-06-01-preview': + from ..v2021_06_01_preview.aio.operations import PrivateLinkResourcesOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'private_link_resources'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2015_08_01/__init__.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2015_08_01/__init__.py index 72f93dd9db6c..b65464aa7533 100644 --- a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2015_08_01/__init__.py +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2015_08_01/__init__.py @@ -7,6 +7,9 @@ # -------------------------------------------------------------------------- from ._event_hub_management_client import EventHubManagementClient +from ._version import VERSION + +__version__ = VERSION __all__ = ['EventHubManagementClient'] try: diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2015_08_01/_configuration.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2015_08_01/_configuration.py index f698e6e92d49..0a35e98603e4 100644 --- 
a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2015_08_01/_configuration.py +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2015_08_01/_configuration.py @@ -12,13 +12,14 @@ from azure.core.pipeline import policies from azure.mgmt.core.policies import ARMHttpLoggingPolicy +from ._version import VERSION + if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any from azure.core.credentials import TokenCredential -VERSION = "unknown" class EventHubManagementClientConfiguration(Configuration): """Configuration for EventHubManagementClient. diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2015_08_01/_version.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2015_08_01/_version.py new file mode 100644 index 000000000000..f89ed38360ab --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2015_08_01/_version.py @@ -0,0 +1,9 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +VERSION = "9.1.0" diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2015_08_01/aio/_configuration.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2015_08_01/aio/_configuration.py index 004a7b9d0e26..8087c2bd45ee 100644 --- a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2015_08_01/aio/_configuration.py +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2015_08_01/aio/_configuration.py @@ -12,11 +12,12 @@ from azure.core.pipeline import policies from azure.mgmt.core.policies import ARMHttpLoggingPolicy +from .._version import VERSION + if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials_async import AsyncTokenCredential -VERSION = "unknown" class EventHubManagementClientConfiguration(Configuration): """Configuration for EventHubManagementClient. 
diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2017_04_01/__init__.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2017_04_01/__init__.py index 72f93dd9db6c..b65464aa7533 100644 --- a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2017_04_01/__init__.py +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2017_04_01/__init__.py @@ -7,6 +7,9 @@ # -------------------------------------------------------------------------- from ._event_hub_management_client import EventHubManagementClient +from ._version import VERSION + +__version__ = VERSION __all__ = ['EventHubManagementClient'] try: diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2017_04_01/_configuration.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2017_04_01/_configuration.py index 82776d7e4da1..f8469b0d1c00 100644 --- a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2017_04_01/_configuration.py +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2017_04_01/_configuration.py @@ -12,13 +12,14 @@ from azure.core.pipeline import policies from azure.mgmt.core.policies import ARMHttpLoggingPolicy +from ._version import VERSION + if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any from azure.core.credentials import TokenCredential -VERSION = "unknown" class EventHubManagementClientConfiguration(Configuration): """Configuration for EventHubManagementClient. diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2017_04_01/_version.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2017_04_01/_version.py new file mode 100644 index 000000000000..f89ed38360ab --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2017_04_01/_version.py @@ -0,0 +1,9 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +VERSION = "9.1.0" diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2017_04_01/aio/_configuration.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2017_04_01/aio/_configuration.py index 587b01b0f3f1..09fe8c679363 100644 --- a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2017_04_01/aio/_configuration.py +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2017_04_01/aio/_configuration.py @@ -12,11 +12,12 @@ from azure.core.pipeline import policies from azure.mgmt.core.policies import ARMHttpLoggingPolicy +from .._version import VERSION + if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials_async import AsyncTokenCredential -VERSION = "unknown" class EventHubManagementClientConfiguration(Configuration): """Configuration for EventHubManagementClient. 
diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2018_01_01_preview/__init__.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2018_01_01_preview/__init__.py index 72f93dd9db6c..b65464aa7533 100644 --- a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2018_01_01_preview/__init__.py +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2018_01_01_preview/__init__.py @@ -7,6 +7,9 @@ # -------------------------------------------------------------------------- from ._event_hub_management_client import EventHubManagementClient +from ._version import VERSION + +__version__ = VERSION __all__ = ['EventHubManagementClient'] try: diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2018_01_01_preview/_configuration.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2018_01_01_preview/_configuration.py index 78cc5d1b0649..86da104bd0b7 100644 --- a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2018_01_01_preview/_configuration.py +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2018_01_01_preview/_configuration.py @@ -12,13 +12,14 @@ from azure.core.pipeline import policies from azure.mgmt.core.policies import ARMHttpLoggingPolicy +from ._version import VERSION + if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any from azure.core.credentials import TokenCredential -VERSION = "unknown" class EventHubManagementClientConfiguration(Configuration): """Configuration for EventHubManagementClient. diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2018_01_01_preview/_version.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2018_01_01_preview/_version.py new file mode 100644 index 000000000000..f89ed38360ab --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2018_01_01_preview/_version.py @@ -0,0 +1,9 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +VERSION = "9.1.0" diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2018_01_01_preview/aio/_configuration.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2018_01_01_preview/aio/_configuration.py index 5ca84445a1f4..06b48560341b 100644 --- a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2018_01_01_preview/aio/_configuration.py +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2018_01_01_preview/aio/_configuration.py @@ -12,11 +12,12 @@ from azure.core.pipeline import policies from azure.mgmt.core.policies import ARMHttpLoggingPolicy +from .._version import VERSION + if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials_async import AsyncTokenCredential -VERSION = "unknown" class EventHubManagementClientConfiguration(Configuration): """Configuration for EventHubManagementClient. 
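The new `_version.py` modules, repeated for each API version folder, replace the old in-file `VERSION = "unknown"` placeholder; the generated configurations import this value and fold it into an `sdk_moniker` such as `mgmt-eventhub/9.1.0`, which ends up in the request User-Agent. A rough illustration using only `azure-core` (the moniker string is an example value):

```py
from azure.core.pipeline.policies import UserAgentPolicy

# The generated configuration does roughly:
#   kwargs.setdefault('sdk_moniker', 'mgmt-eventhub/{}'.format(VERSION))
policy = UserAgentPolicy(sdk_moniker="mgmt-eventhub/9.1.0")
print(policy.user_agent)  # e.g. "azsdk-python-mgmt-eventhub/9.1.0 Python/3.9.7 (...)"
```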
diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2018_01_01_preview/models/_models.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2018_01_01_preview/models/_models.py index 1287e38dd97e..195086f8199a 100644 --- a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2018_01_01_preview/models/_models.py +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2018_01_01_preview/models/_models.py @@ -566,7 +566,7 @@ class ClusterSku(msrest.serialization.Model): _validation = { 'name': {'required': True}, - 'capacity': {'maximum': 32, 'minimum': 1}, + 'capacity': {'minimum': 1}, } _attribute_map = { @@ -735,6 +735,8 @@ class EHNamespace(TrackedResource): :type identity: ~azure.mgmt.eventhub.v2018_01_01_preview.models.Identity :ivar provisioning_state: Provisioning state of the Namespace. :vartype provisioning_state: str + :ivar status: Status of the Namespace. + :vartype status: str :ivar created_at: The time the Namespace was created. :vartype created_at: ~datetime.datetime :ivar updated_at: The time the Namespace was updated. @@ -765,6 +767,7 @@ class EHNamespace(TrackedResource): 'name': {'readonly': True}, 'type': {'readonly': True}, 'provisioning_state': {'readonly': True}, + 'status': {'readonly': True}, 'created_at': {'readonly': True}, 'updated_at': {'readonly': True}, 'service_bus_endpoint': {'readonly': True}, @@ -781,6 +784,7 @@ class EHNamespace(TrackedResource): 'sku': {'key': 'sku', 'type': 'Sku'}, 'identity': {'key': 'identity', 'type': 'Identity'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'status': {'key': 'properties.status', 'type': 'str'}, 'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'}, 'updated_at': {'key': 'properties.updatedAt', 'type': 'iso-8601'}, 'service_bus_endpoint': {'key': 'properties.serviceBusEndpoint', 'type': 'str'}, @@ -801,6 +805,7 @@ def __init__( self.sku = kwargs.get('sku', None) self.identity = kwargs.get('identity', None) self.provisioning_state = None + self.status = None self.created_at = None self.updated_at = None self.service_bus_endpoint = None @@ -878,33 +883,27 @@ def __init__( class Encryption(msrest.serialization.Model): """Properties to configure Encryption. - Variables are only populated by the server, and will be ignored when sending a request. - :param key_vault_properties: Properties of KeyVault. :type key_vault_properties: list[~azure.mgmt.eventhub.v2018_01_01_preview.models.KeyVaultProperties] - :ivar key_source: Enumerates the possible value of keySource for Encryption. Default value: + :param key_source: Enumerates the possible value of keySource for Encryption. The only + acceptable values to pass in are None and "Microsoft.KeyVault". The default value is "Microsoft.KeyVault". - :vartype key_source: str + :type key_source: str """ - _validation = { - 'key_source': {'constant': True}, - } - _attribute_map = { 'key_vault_properties': {'key': 'keyVaultProperties', 'type': '[KeyVaultProperties]'}, 'key_source': {'key': 'keySource', 'type': 'str'}, } - key_source = "Microsoft.KeyVault" - def __init__( self, **kwargs ): super(Encryption, self).__init__(**kwargs) self.key_vault_properties = kwargs.get('key_vault_properties', None) + self.key_source = kwargs.get('key_source', "Microsoft.KeyVault") class ErrorResponse(msrest.serialization.Model): @@ -1026,29 +1025,22 @@ def __init__( class Identity(msrest.serialization.Model): """Properties to configure Identity for Bring your Own Keys. 
- Variables are only populated by the server, and will be ignored when sending a request. - :param principal_id: ObjectId from the KeyVault. :type principal_id: str :param tenant_id: TenantId from the KeyVault. :type tenant_id: str - :ivar type: Enumerates the possible value Identity type, which currently supports only - 'SystemAssigned'. Default value: "SystemAssigned". - :vartype type: str + :param type: Enumerates the possible value Identity type, which currently supports only + 'SystemAssigned'. The only acceptable values to pass in are None and "SystemAssigned". The + default value is "SystemAssigned". + :type type: str """ - _validation = { - 'type': {'constant': True}, - } - _attribute_map = { 'principal_id': {'key': 'principalId', 'type': 'str'}, 'tenant_id': {'key': 'tenantId', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, } - type = "SystemAssigned" - def __init__( self, **kwargs @@ -1056,6 +1048,7 @@ def __init__( super(Identity, self).__init__(**kwargs) self.principal_id = kwargs.get('principal_id', None) self.tenant_id = kwargs.get('tenant_id', None) + self.type = kwargs.get('type', "SystemAssigned") class IpFilterRule(Resource): @@ -1646,7 +1639,7 @@ class Sku(msrest.serialization.Model): _validation = { 'name': {'required': True}, - 'capacity': {'maximum': 20, 'minimum': 0}, + 'capacity': {'minimum': 0}, } _attribute_map = { diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2018_01_01_preview/models/_models_py3.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2018_01_01_preview/models/_models_py3.py index f9539a1d126e..b77e8ceb7310 100644 --- a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2018_01_01_preview/models/_models_py3.py +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2018_01_01_preview/models/_models_py3.py @@ -608,7 +608,7 @@ class ClusterSku(msrest.serialization.Model): _validation = { 'name': {'required': True}, - 'capacity': {'maximum': 32, 'minimum': 1}, + 'capacity': {'minimum': 1}, } _attribute_map = { @@ -793,6 +793,8 @@ class EHNamespace(TrackedResource): :type identity: ~azure.mgmt.eventhub.v2018_01_01_preview.models.Identity :ivar provisioning_state: Provisioning state of the Namespace. :vartype provisioning_state: str + :ivar status: Status of the Namespace. + :vartype status: str :ivar created_at: The time the Namespace was created. :vartype created_at: ~datetime.datetime :ivar updated_at: The time the Namespace was updated. 
@@ -823,6 +825,7 @@ class EHNamespace(TrackedResource): 'name': {'readonly': True}, 'type': {'readonly': True}, 'provisioning_state': {'readonly': True}, + 'status': {'readonly': True}, 'created_at': {'readonly': True}, 'updated_at': {'readonly': True}, 'service_bus_endpoint': {'readonly': True}, @@ -839,6 +842,7 @@ class EHNamespace(TrackedResource): 'sku': {'key': 'sku', 'type': 'Sku'}, 'identity': {'key': 'identity', 'type': 'Identity'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'status': {'key': 'properties.status', 'type': 'str'}, 'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'}, 'updated_at': {'key': 'properties.updatedAt', 'type': 'iso-8601'}, 'service_bus_endpoint': {'key': 'properties.serviceBusEndpoint', 'type': 'str'}, @@ -870,6 +874,7 @@ def __init__( self.sku = sku self.identity = identity self.provisioning_state = None + self.status = None self.created_at = None self.updated_at = None self.service_bus_endpoint = None @@ -954,35 +959,30 @@ def __init__( class Encryption(msrest.serialization.Model): """Properties to configure Encryption. - Variables are only populated by the server, and will be ignored when sending a request. - :param key_vault_properties: Properties of KeyVault. :type key_vault_properties: list[~azure.mgmt.eventhub.v2018_01_01_preview.models.KeyVaultProperties] - :ivar key_source: Enumerates the possible value of keySource for Encryption. Default value: + :param key_source: Enumerates the possible value of keySource for Encryption. The only + acceptable values to pass in are None and "Microsoft.KeyVault". The default value is "Microsoft.KeyVault". - :vartype key_source: str + :type key_source: str """ - _validation = { - 'key_source': {'constant': True}, - } - _attribute_map = { 'key_vault_properties': {'key': 'keyVaultProperties', 'type': '[KeyVaultProperties]'}, 'key_source': {'key': 'keySource', 'type': 'str'}, } - key_source = "Microsoft.KeyVault" - def __init__( self, *, key_vault_properties: Optional[List["KeyVaultProperties"]] = None, + key_source: Optional[str] = "Microsoft.KeyVault", **kwargs ): super(Encryption, self).__init__(**kwargs) self.key_vault_properties = key_vault_properties + self.key_source = key_source class ErrorResponse(msrest.serialization.Model): @@ -1115,39 +1115,34 @@ def __init__( class Identity(msrest.serialization.Model): """Properties to configure Identity for Bring your Own Keys. - Variables are only populated by the server, and will be ignored when sending a request. - :param principal_id: ObjectId from the KeyVault. :type principal_id: str :param tenant_id: TenantId from the KeyVault. :type tenant_id: str - :ivar type: Enumerates the possible value Identity type, which currently supports only - 'SystemAssigned'. Default value: "SystemAssigned". - :vartype type: str + :param type: Enumerates the possible value Identity type, which currently supports only + 'SystemAssigned'. The only acceptable values to pass in are None and "SystemAssigned". The + default value is "SystemAssigned". 
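With the new read-only `status` property on `EHNamespace` (mirrored in both the Python 2 and Python 3 model files), callers can inspect it on responses but cannot set it. A short sketch, assuming `azure-identity` is installed and that the resolved API version includes the property; resource names are placeholders:

```py
from azure.identity import DefaultAzureCredential
from azure.mgmt.eventhub import EventHubManagementClient

client = EventHubManagementClient(DefaultAzureCredential(), "<subscription-id>")
ns = client.namespaces.get("<resource-group>", "<namespace-name>")
# Both fields are populated by the service and marked readonly in _validation.
print(ns.provisioning_state, ns.status)
```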
+ :type type: str """ - _validation = { - 'type': {'constant': True}, - } - _attribute_map = { 'principal_id': {'key': 'principalId', 'type': 'str'}, 'tenant_id': {'key': 'tenantId', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, } - type = "SystemAssigned" - def __init__( self, *, principal_id: Optional[str] = None, tenant_id: Optional[str] = None, + type: Optional[str] = "SystemAssigned", **kwargs ): super(Identity, self).__init__(**kwargs) self.principal_id = principal_id self.tenant_id = tenant_id + self.type = type class IpFilterRule(Resource): @@ -1790,7 +1785,7 @@ class Sku(msrest.serialization.Model): _validation = { 'name': {'required': True}, - 'capacity': {'maximum': 20, 'minimum': 0}, + 'capacity': {'minimum': 0}, } _attribute_map = { diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_01_01_preview/__init__.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_01_01_preview/__init__.py index 72f93dd9db6c..b65464aa7533 100644 --- a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_01_01_preview/__init__.py +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_01_01_preview/__init__.py @@ -7,6 +7,9 @@ # -------------------------------------------------------------------------- from ._event_hub_management_client import EventHubManagementClient +from ._version import VERSION + +__version__ = VERSION __all__ = ['EventHubManagementClient'] try: diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_01_01_preview/_configuration.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_01_01_preview/_configuration.py index 491686092882..fe795e13f108 100644 --- a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_01_01_preview/_configuration.py +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_01_01_preview/_configuration.py @@ -12,13 +12,14 @@ from azure.core.pipeline import policies from azure.mgmt.core.policies import ARMHttpLoggingPolicy +from ._version import VERSION + if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any from azure.core.credentials import TokenCredential -VERSION = "unknown" class EventHubManagementClientConfiguration(Configuration): """Configuration for EventHubManagementClient. diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_01_01_preview/_version.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_01_01_preview/_version.py new file mode 100644 index 000000000000..f89ed38360ab --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_01_01_preview/_version.py @@ -0,0 +1,9 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
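The `Encryption.key_source` and `Identity.type` fields above change from constant class attributes to regular constructor parameters that default to the old constant values. A sketch of what that enables for callers of the 2018-01-01-preview models (the key vault values, and the `KeyVaultProperties` field names, are placeholders/assumptions):

```py
from azure.mgmt.eventhub.v2018_01_01_preview.models import (
    Encryption,
    Identity,
    KeyVaultProperties,
)

# Previously read-only class constants; now settable keyword arguments with the
# same defaults.
identity = Identity(type="SystemAssigned")
encryption = Encryption(
    key_vault_properties=[
        KeyVaultProperties(
            key_name="<key-name>",
            key_vault_uri="https://<vault-name>.vault.azure.net",
        )
    ],
    key_source="Microsoft.KeyVault",
)
```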
+# -------------------------------------------------------------------------- + +VERSION = "9.1.0" diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_01_01_preview/aio/_configuration.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_01_01_preview/aio/_configuration.py index 17c6db93518a..175aa96d6665 100644 --- a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_01_01_preview/aio/_configuration.py +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_01_01_preview/aio/_configuration.py @@ -12,11 +12,12 @@ from azure.core.pipeline import policies from azure.mgmt.core.policies import ARMHttpLoggingPolicy +from .._version import VERSION + if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials_async import AsyncTokenCredential -VERSION = "unknown" class EventHubManagementClientConfiguration(Configuration): """Configuration for EventHubManagementClient. diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_01_01_preview/models/_models.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_01_01_preview/models/_models.py index 28906af33b90..f1080cbf888e 100644 --- a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_01_01_preview/models/_models.py +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_01_01_preview/models/_models.py @@ -606,6 +606,8 @@ class EHNamespace(TrackedResource): :vartype system_data: ~azure.mgmt.eventhub.v2021_01_01_preview.models.SystemData :ivar provisioning_state: Provisioning state of the Namespace. :vartype provisioning_state: str + :ivar status: Status of the Namespace. + :vartype status: str :ivar created_at: The time the Namespace was created. :vartype created_at: ~datetime.datetime :ivar updated_at: The time the Namespace was updated. @@ -640,6 +642,7 @@ class EHNamespace(TrackedResource): 'type': {'readonly': True}, 'system_data': {'readonly': True}, 'provisioning_state': {'readonly': True}, + 'status': {'readonly': True}, 'created_at': {'readonly': True}, 'updated_at': {'readonly': True}, 'service_bus_endpoint': {'readonly': True}, @@ -657,6 +660,7 @@ class EHNamespace(TrackedResource): 'identity': {'key': 'identity', 'type': 'Identity'}, 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'status': {'key': 'properties.status', 'type': 'str'}, 'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'}, 'updated_at': {'key': 'properties.updatedAt', 'type': 'iso-8601'}, 'service_bus_endpoint': {'key': 'properties.serviceBusEndpoint', 'type': 'str'}, @@ -679,6 +683,7 @@ def __init__( self.identity = kwargs.get('identity', None) self.system_data = None self.provisioning_state = None + self.status = None self.created_at = None self.updated_at = None self.service_bus_endpoint = None @@ -719,36 +724,30 @@ def __init__( class Encryption(msrest.serialization.Model): """Properties to configure Encryption. - Variables are only populated by the server, and will be ignored when sending a request. - :param key_vault_properties: Properties of KeyVault. :type key_vault_properties: list[~azure.mgmt.eventhub.v2021_01_01_preview.models.KeyVaultProperties] - :ivar key_source: Enumerates the possible value of keySource for Encryption. Default value: + :param key_source: Enumerates the possible value of keySource for Encryption. The only + acceptable values to pass in are None and "Microsoft.KeyVault". The default value is "Microsoft.KeyVault". 
- :vartype key_source: str + :type key_source: str :param require_infrastructure_encryption: Enable Infrastructure Encryption (Double Encryption). :type require_infrastructure_encryption: bool """ - _validation = { - 'key_source': {'constant': True}, - } - _attribute_map = { 'key_vault_properties': {'key': 'keyVaultProperties', 'type': '[KeyVaultProperties]'}, 'key_source': {'key': 'keySource', 'type': 'str'}, 'require_infrastructure_encryption': {'key': 'requireInfrastructureEncryption', 'type': 'bool'}, } - key_source = "Microsoft.KeyVault" - def __init__( self, **kwargs ): super(Encryption, self).__init__(**kwargs) self.key_vault_properties = kwargs.get('key_vault_properties', None) + self.key_source = kwargs.get('key_source', "Microsoft.KeyVault") self.require_infrastructure_encryption = kwargs.get('require_infrastructure_encryption', None) @@ -1348,7 +1347,7 @@ class Sku(msrest.serialization.Model): _validation = { 'name': {'required': True}, - 'capacity': {'maximum': 20, 'minimum': 0}, + 'capacity': {'minimum': 0}, } _attribute_map = { diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_01_01_preview/models/_models_py3.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_01_01_preview/models/_models_py3.py index 476a37dcd549..0bfac6b984e8 100644 --- a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_01_01_preview/models/_models_py3.py +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_01_01_preview/models/_models_py3.py @@ -649,6 +649,8 @@ class EHNamespace(TrackedResource): :vartype system_data: ~azure.mgmt.eventhub.v2021_01_01_preview.models.SystemData :ivar provisioning_state: Provisioning state of the Namespace. :vartype provisioning_state: str + :ivar status: Status of the Namespace. + :vartype status: str :ivar created_at: The time the Namespace was created. :vartype created_at: ~datetime.datetime :ivar updated_at: The time the Namespace was updated. @@ -683,6 +685,7 @@ class EHNamespace(TrackedResource): 'type': {'readonly': True}, 'system_data': {'readonly': True}, 'provisioning_state': {'readonly': True}, + 'status': {'readonly': True}, 'created_at': {'readonly': True}, 'updated_at': {'readonly': True}, 'service_bus_endpoint': {'readonly': True}, @@ -700,6 +703,7 @@ class EHNamespace(TrackedResource): 'identity': {'key': 'identity', 'type': 'Identity'}, 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'status': {'key': 'properties.status', 'type': 'str'}, 'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'}, 'updated_at': {'key': 'properties.updatedAt', 'type': 'iso-8601'}, 'service_bus_endpoint': {'key': 'properties.serviceBusEndpoint', 'type': 'str'}, @@ -734,6 +738,7 @@ def __init__( self.identity = identity self.system_data = None self.provisioning_state = None + self.status = None self.created_at = None self.updated_at = None self.service_bus_endpoint = None @@ -777,39 +782,34 @@ def __init__( class Encryption(msrest.serialization.Model): """Properties to configure Encryption. - Variables are only populated by the server, and will be ignored when sending a request. - :param key_vault_properties: Properties of KeyVault. :type key_vault_properties: list[~azure.mgmt.eventhub.v2021_01_01_preview.models.KeyVaultProperties] - :ivar key_source: Enumerates the possible value of keySource for Encryption. Default value: + :param key_source: Enumerates the possible value of keySource for Encryption. 
The only + acceptable values to pass in are None and "Microsoft.KeyVault". The default value is "Microsoft.KeyVault". - :vartype key_source: str + :type key_source: str :param require_infrastructure_encryption: Enable Infrastructure Encryption (Double Encryption). :type require_infrastructure_encryption: bool """ - _validation = { - 'key_source': {'constant': True}, - } - _attribute_map = { 'key_vault_properties': {'key': 'keyVaultProperties', 'type': '[KeyVaultProperties]'}, 'key_source': {'key': 'keySource', 'type': 'str'}, 'require_infrastructure_encryption': {'key': 'requireInfrastructureEncryption', 'type': 'bool'}, } - key_source = "Microsoft.KeyVault" - def __init__( self, *, key_vault_properties: Optional[List["KeyVaultProperties"]] = None, + key_source: Optional[str] = "Microsoft.KeyVault", require_infrastructure_encryption: Optional[bool] = None, **kwargs ): super(Encryption, self).__init__(**kwargs) self.key_vault_properties = key_vault_properties + self.key_source = key_source self.require_infrastructure_encryption = require_infrastructure_encryption @@ -1463,7 +1463,7 @@ class Sku(msrest.serialization.Model): _validation = { 'name': {'required': True}, - 'capacity': {'maximum': 20, 'minimum': 0}, + 'capacity': {'minimum': 0}, } _attribute_map = { diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/__init__.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/__init__.py new file mode 100644 index 000000000000..b65464aa7533 --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/__init__.py @@ -0,0 +1,19 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._event_hub_management_client import EventHubManagementClient +from ._version import VERSION + +__version__ = VERSION +__all__ = ['EventHubManagementClient'] + +try: + from ._patch import patch_sdk # type: ignore + patch_sdk() +except ImportError: + pass diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/_configuration.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/_configuration.py new file mode 100644 index 000000000000..b9bf44ea4e36 --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/_configuration.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from typing import TYPE_CHECKING + +from azure.core.configuration import Configuration +from azure.core.pipeline import policies +from azure.mgmt.core.policies import ARMHttpLoggingPolicy + +from ._version import VERSION + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any + + from azure.core.credentials import TokenCredential + + +class EventHubManagementClientConfiguration(Configuration): + """Configuration for EventHubManagementClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials.TokenCredential + :param subscription_id: Subscription credentials that uniquely identify a Microsoft Azure subscription. The subscription ID forms part of the URI for every service call. + :type subscription_id: str + """ + + def __init__( + self, + credential, # type: "TokenCredential" + subscription_id, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + if subscription_id is None: + raise ValueError("Parameter 'subscription_id' must not be None.") + super(EventHubManagementClientConfiguration, self).__init__(**kwargs) + + self.credential = credential + self.subscription_id = subscription_id + self.api_version = "2021-06-01-preview" + self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) + kwargs.setdefault('sdk_moniker', 'mgmt-eventhub/{}'.format(VERSION)) + self._configure(**kwargs) + + def _configure( + self, + **kwargs # type: Any + ): + # type: (...) -> None + self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get('authentication_policy') + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/_event_hub_management_client.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/_event_hub_management_client.py new file mode 100644 index 000000000000..beeaa7db93c5 --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/_event_hub_management_client.py @@ -0,0 +1,129 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
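The `_configure` method in the new configuration class pulls each pipeline policy from `kwargs`, so individual policies can be swapped at client construction time. A sketch, assuming `azure-identity` is installed:

```py
from azure.core.pipeline.policies import RetryPolicy
from azure.identity import DefaultAzureCredential
from azure.mgmt.eventhub.v2021_06_01_preview import EventHubManagementClient

client = EventHubManagementClient(
    DefaultAzureCredential(),
    "<subscription-id>",
    retry_policy=RetryPolicy(retry_total=3),  # picked up by _configure(**kwargs)
    logging_enable=True,  # forwarded to NetworkTraceLoggingPolicy(**kwargs)
)
```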
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import TYPE_CHECKING + +from azure.mgmt.core import ARMPipelineClient +from msrest import Deserializer, Serializer + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Optional + + from azure.core.credentials import TokenCredential + from azure.core.pipeline.transport import HttpRequest, HttpResponse + +from ._configuration import EventHubManagementClientConfiguration +from .operations import ClustersOperations +from .operations import ConfigurationOperations +from .operations import NamespacesOperations +from .operations import PrivateEndpointConnectionsOperations +from .operations import PrivateLinkResourcesOperations +from .operations import Operations +from .operations import EventHubsOperations +from .operations import DisasterRecoveryConfigsOperations +from .operations import ConsumerGroupsOperations +from . import models + + +class EventHubManagementClient(object): + """Azure Event Hubs client for managing Event Hubs Cluster, IPFilter Rules and VirtualNetworkRules resources. + + :ivar clusters: ClustersOperations operations + :vartype clusters: azure.mgmt.eventhub.v2021_06_01_preview.operations.ClustersOperations + :ivar configuration: ConfigurationOperations operations + :vartype configuration: azure.mgmt.eventhub.v2021_06_01_preview.operations.ConfigurationOperations + :ivar namespaces: NamespacesOperations operations + :vartype namespaces: azure.mgmt.eventhub.v2021_06_01_preview.operations.NamespacesOperations + :ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations + :vartype private_endpoint_connections: azure.mgmt.eventhub.v2021_06_01_preview.operations.PrivateEndpointConnectionsOperations + :ivar private_link_resources: PrivateLinkResourcesOperations operations + :vartype private_link_resources: azure.mgmt.eventhub.v2021_06_01_preview.operations.PrivateLinkResourcesOperations + :ivar operations: Operations operations + :vartype operations: azure.mgmt.eventhub.v2021_06_01_preview.operations.Operations + :ivar event_hubs: EventHubsOperations operations + :vartype event_hubs: azure.mgmt.eventhub.v2021_06_01_preview.operations.EventHubsOperations + :ivar disaster_recovery_configs: DisasterRecoveryConfigsOperations operations + :vartype disaster_recovery_configs: azure.mgmt.eventhub.v2021_06_01_preview.operations.DisasterRecoveryConfigsOperations + :ivar consumer_groups: ConsumerGroupsOperations operations + :vartype consumer_groups: azure.mgmt.eventhub.v2021_06_01_preview.operations.ConsumerGroupsOperations + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials.TokenCredential + :param subscription_id: Subscription credentials that uniquely identify a Microsoft Azure subscription. The subscription ID forms part of the URI for every service call. + :type subscription_id: str + :param str base_url: Service URL + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + """ + + def __init__( + self, + credential, # type: "TokenCredential" + subscription_id, # type: str + base_url=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) 
-> None + if not base_url: + base_url = 'https://management.azure.com' + self._config = EventHubManagementClientConfiguration(credential, subscription_id, **kwargs) + self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._serialize.client_side_validation = False + self._deserialize = Deserializer(client_models) + + self.clusters = ClustersOperations( + self._client, self._config, self._serialize, self._deserialize) + self.configuration = ConfigurationOperations( + self._client, self._config, self._serialize, self._deserialize) + self.namespaces = NamespacesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.private_endpoint_connections = PrivateEndpointConnectionsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.private_link_resources = PrivateLinkResourcesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.operations = Operations( + self._client, self._config, self._serialize, self._deserialize) + self.event_hubs = EventHubsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.disaster_recovery_configs = DisasterRecoveryConfigsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.consumer_groups = ConsumerGroupsOperations( + self._client, self._config, self._serialize, self._deserialize) + + def _send_request(self, http_request, **kwargs): + # type: (HttpRequest, Any) -> HttpResponse + """Runs the network request through the client's chained policies. + + :param http_request: The network request you want to make. Required. + :type http_request: ~azure.core.pipeline.transport.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to True. + :return: The response of your network call. Does not do error handling on your response. 
+ :rtype: ~azure.core.pipeline.transport.HttpResponse + """ + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + http_request.url = self._client.format_url(http_request.url, **path_format_arguments) + stream = kwargs.pop("stream", True) + pipeline_response = self._client._pipeline.run(http_request, stream=stream, **kwargs) + return pipeline_response.http_response + + def close(self): + # type: () -> None + self._client.close() + + def __enter__(self): + # type: () -> EventHubManagementClient + self._client.__enter__() + return self + + def __exit__(self, *exc_details): + # type: (Any) -> None + self._client.__exit__(*exc_details) diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/_metadata.json b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/_metadata.json new file mode 100644 index 000000000000..8a86fd3ac377 --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/_metadata.json @@ -0,0 +1,111 @@ +{ + "chosen_version": "2021-06-01-preview", + "total_api_version_list": ["2021-06-01-preview"], + "client": { + "name": "EventHubManagementClient", + "filename": "_event_hub_management_client", + "description": "Azure Event Hubs client for managing Event Hubs Cluster, IPFilter Rules and VirtualNetworkRules resources.", + "base_url": "\u0027https://management.azure.com\u0027", + "custom_base_url": null, + "azure_arm": true, + "has_lro_operations": true, + "client_side_validation": false, + "sync_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials\": [\"TokenCredential\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", \"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"msrest\": [\"Deserializer\", \"Serializer\"], \"azure.mgmt.core\": [\"ARMPipelineClient\"]}, \"local\": {\"._configuration\": [\"EventHubManagementClientConfiguration\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}, \"azurecore\": {\"azure.core.pipeline.transport\": [\"HttpRequest\", \"HttpResponse\"]}}}", + "async_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials_async\": [\"AsyncTokenCredential\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", \"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"msrest\": [\"Deserializer\", \"Serializer\"], \"azure.mgmt.core\": [\"AsyncARMPipelineClient\"]}, \"local\": {\"._configuration\": [\"EventHubManagementClientConfiguration\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}, \"azurecore\": {\"azure.core.pipeline.transport\": [\"AsyncHttpResponse\", \"HttpRequest\"]}}}" + }, + "global_parameters": { + "sync": { + "credential": { + "signature": "credential, # type: \"TokenCredential\"", + "description": "Credential needed for the client to connect to Azure.", + "docstring_type": "~azure.core.credentials.TokenCredential", + "required": true + }, + "subscription_id": { + "signature": "subscription_id, # type: str", + "description": "Subscription credentials that uniquely identify a Microsoft Azure subscription. 
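`_send_request` is an internal helper, but it shows how a raw request flows through the client's pipeline: the subscription ID is formatted into the URL and the response is returned without error handling. An illustrative sketch against a plausible ARM list path (the path and placeholders are assumptions):

```py
from azure.core.pipeline.transport import HttpRequest
from azure.identity import DefaultAzureCredential
from azure.mgmt.eventhub.v2021_06_01_preview import EventHubManagementClient

with EventHubManagementClient(DefaultAzureCredential(), "<subscription-id>") as client:
    request = HttpRequest(
        "GET",
        "/subscriptions/{subscriptionId}/providers/Microsoft.EventHub/clusters"
        "?api-version=2021-06-01-preview",
    )
    # stream=False buffers the body; no error handling is applied to the response.
    response = client._send_request(request, stream=False)
    print(response.status_code)
```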
The subscription ID forms part of the URI for every service call.", + "docstring_type": "str", + "required": true + } + }, + "async": { + "credential": { + "signature": "credential: \"AsyncTokenCredential\",", + "description": "Credential needed for the client to connect to Azure.", + "docstring_type": "~azure.core.credentials_async.AsyncTokenCredential", + "required": true + }, + "subscription_id": { + "signature": "subscription_id: str,", + "description": "Subscription credentials that uniquely identify a Microsoft Azure subscription. The subscription ID forms part of the URI for every service call.", + "docstring_type": "str", + "required": true + } + }, + "constant": { + }, + "call": "credential, subscription_id", + "service_client_specific": { + "sync": { + "api_version": { + "signature": "api_version=None, # type: Optional[str]", + "description": "API version to use if no profile is provided, or if missing in profile.", + "docstring_type": "str", + "required": false + }, + "base_url": { + "signature": "base_url=None, # type: Optional[str]", + "description": "Service URL", + "docstring_type": "str", + "required": false + }, + "profile": { + "signature": "profile=KnownProfiles.default, # type: KnownProfiles", + "description": "A profile definition, from KnownProfiles to dict.", + "docstring_type": "azure.profiles.KnownProfiles", + "required": false + } + }, + "async": { + "api_version": { + "signature": "api_version: Optional[str] = None,", + "description": "API version to use if no profile is provided, or if missing in profile.", + "docstring_type": "str", + "required": false + }, + "base_url": { + "signature": "base_url: Optional[str] = None,", + "description": "Service URL", + "docstring_type": "str", + "required": false + }, + "profile": { + "signature": "profile: KnownProfiles = KnownProfiles.default,", + "description": "A profile definition, from KnownProfiles to dict.", + "docstring_type": "azure.profiles.KnownProfiles", + "required": false + } + } + } + }, + "config": { + "credential": true, + "credential_scopes": ["https://management.azure.com/.default"], + "credential_default_policy_type": "BearerTokenCredentialPolicy", + "credential_default_policy_type_has_async_version": true, + "credential_key_header_name": null, + "sync_imports": "{\"regular\": {\"azurecore\": {\"azure.core.configuration\": [\"Configuration\"], \"azure.core.pipeline\": [\"policies\"], \"azure.mgmt.core.policies\": [\"ARMHttpLoggingPolicy\"]}, \"local\": {\"._version\": [\"VERSION\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\"]}}, \"typing\": {\"azurecore\": {\"azure.core.credentials\": [\"TokenCredential\"]}}}", + "async_imports": "{\"regular\": {\"azurecore\": {\"azure.core.configuration\": [\"Configuration\"], \"azure.core.pipeline\": [\"policies\"], \"azure.mgmt.core.policies\": [\"ARMHttpLoggingPolicy\"]}, \"local\": {\".._version\": [\"VERSION\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\"]}}, \"typing\": {\"azurecore\": {\"azure.core.credentials_async\": [\"AsyncTokenCredential\"]}}}" + }, + "operation_groups": { + "clusters": "ClustersOperations", + "configuration": "ConfigurationOperations", + "namespaces": "NamespacesOperations", + "private_endpoint_connections": "PrivateEndpointConnectionsOperations", + "private_link_resources": "PrivateLinkResourcesOperations", + "operations": "Operations", + "event_hubs": "EventHubsOperations", + "disaster_recovery_configs": "DisasterRecoveryConfigsOperations", + "consumer_groups": "ConsumerGroupsOperations" + } +} \ No newline at end of 
file diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/_version.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/_version.py new file mode 100644 index 000000000000..f89ed38360ab --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/_version.py @@ -0,0 +1,9 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +VERSION = "9.1.0" diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/__init__.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/__init__.py new file mode 100644 index 000000000000..7cb0ca65be40 --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/__init__.py @@ -0,0 +1,10 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._event_hub_management_client import EventHubManagementClient +__all__ = ['EventHubManagementClient'] diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/_configuration.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/_configuration.py new file mode 100644 index 000000000000..f5a3a9c9dde4 --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/_configuration.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING + +from azure.core.configuration import Configuration +from azure.core.pipeline import policies +from azure.mgmt.core.policies import ARMHttpLoggingPolicy + +from .._version import VERSION + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential + + +class EventHubManagementClientConfiguration(Configuration): + """Configuration for EventHubManagementClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param subscription_id: Subscription credentials that uniquely identify a Microsoft Azure subscription. 
The subscription ID forms part of the URI for every service call. + :type subscription_id: str + """ + + def __init__( + self, + credential: "AsyncTokenCredential", + subscription_id: str, + **kwargs: Any + ) -> None: + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + if subscription_id is None: + raise ValueError("Parameter 'subscription_id' must not be None.") + super(EventHubManagementClientConfiguration, self).__init__(**kwargs) + + self.credential = credential + self.subscription_id = subscription_id + self.api_version = "2021-06-01-preview" + self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) + kwargs.setdefault('sdk_moniker', 'mgmt-eventhub/{}'.format(VERSION)) + self._configure(**kwargs) + + def _configure( + self, + **kwargs: Any + ) -> None: + self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get('authentication_policy') + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/_event_hub_management_client.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/_event_hub_management_client.py new file mode 100644 index 000000000000..b36eca3aa4b0 --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/_event_hub_management_client.py @@ -0,0 +1,122 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from typing import Any, Optional, TYPE_CHECKING + +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core import AsyncARMPipelineClient +from msrest import Deserializer, Serializer + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential + +from ._configuration import EventHubManagementClientConfiguration +from .operations import ClustersOperations +from .operations import ConfigurationOperations +from .operations import NamespacesOperations +from .operations import PrivateEndpointConnectionsOperations +from .operations import PrivateLinkResourcesOperations +from .operations import Operations +from .operations import EventHubsOperations +from .operations import DisasterRecoveryConfigsOperations +from .operations import ConsumerGroupsOperations +from .. import models + + +class EventHubManagementClient(object): + """Azure Event Hubs client for managing Event Hubs Cluster, IPFilter Rules and VirtualNetworkRules resources. + + :ivar clusters: ClustersOperations operations + :vartype clusters: azure.mgmt.eventhub.v2021_06_01_preview.aio.operations.ClustersOperations + :ivar configuration: ConfigurationOperations operations + :vartype configuration: azure.mgmt.eventhub.v2021_06_01_preview.aio.operations.ConfigurationOperations + :ivar namespaces: NamespacesOperations operations + :vartype namespaces: azure.mgmt.eventhub.v2021_06_01_preview.aio.operations.NamespacesOperations + :ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations + :vartype private_endpoint_connections: azure.mgmt.eventhub.v2021_06_01_preview.aio.operations.PrivateEndpointConnectionsOperations + :ivar private_link_resources: PrivateLinkResourcesOperations operations + :vartype private_link_resources: azure.mgmt.eventhub.v2021_06_01_preview.aio.operations.PrivateLinkResourcesOperations + :ivar operations: Operations operations + :vartype operations: azure.mgmt.eventhub.v2021_06_01_preview.aio.operations.Operations + :ivar event_hubs: EventHubsOperations operations + :vartype event_hubs: azure.mgmt.eventhub.v2021_06_01_preview.aio.operations.EventHubsOperations + :ivar disaster_recovery_configs: DisasterRecoveryConfigsOperations operations + :vartype disaster_recovery_configs: azure.mgmt.eventhub.v2021_06_01_preview.aio.operations.DisasterRecoveryConfigsOperations + :ivar consumer_groups: ConsumerGroupsOperations operations + :vartype consumer_groups: azure.mgmt.eventhub.v2021_06_01_preview.aio.operations.ConsumerGroupsOperations + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param subscription_id: Subscription credentials that uniquely identify a Microsoft Azure subscription. The subscription ID forms part of the URI for every service call. + :type subscription_id: str + :param str base_url: Service URL + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ """ + + def __init__( + self, + credential: "AsyncTokenCredential", + subscription_id: str, + base_url: Optional[str] = None, + **kwargs: Any + ) -> None: + if not base_url: + base_url = 'https://management.azure.com' + self._config = EventHubManagementClientConfiguration(credential, subscription_id, **kwargs) + self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._serialize.client_side_validation = False + self._deserialize = Deserializer(client_models) + + self.clusters = ClustersOperations( + self._client, self._config, self._serialize, self._deserialize) + self.configuration = ConfigurationOperations( + self._client, self._config, self._serialize, self._deserialize) + self.namespaces = NamespacesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.private_endpoint_connections = PrivateEndpointConnectionsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.private_link_resources = PrivateLinkResourcesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.operations = Operations( + self._client, self._config, self._serialize, self._deserialize) + self.event_hubs = EventHubsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.disaster_recovery_configs = DisasterRecoveryConfigsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.consumer_groups = ConsumerGroupsOperations( + self._client, self._config, self._serialize, self._deserialize) + + async def _send_request(self, http_request: HttpRequest, **kwargs: Any) -> AsyncHttpResponse: + """Runs the network request through the client's chained policies. + + :param http_request: The network request you want to make. Required. + :type http_request: ~azure.core.pipeline.transport.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to True. + :return: The response of your network call. Does not do error handling on your response. + :rtype: ~azure.core.pipeline.transport.AsyncHttpResponse + """ + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + http_request.url = self._client.format_url(http_request.url, **path_format_arguments) + stream = kwargs.pop("stream", True) + pipeline_response = await self._client._pipeline.run(http_request, stream=stream, **kwargs) + return pipeline_response.http_response + + async def close(self) -> None: + await self._client.close() + + async def __aenter__(self) -> "EventHubManagementClient": + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details) -> None: + await self._client.__aexit__(*exc_details) diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/operations/__init__.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/operations/__init__.py new file mode 100644 index 000000000000..11daeaa7c220 --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/operations/__init__.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._clusters_operations import ClustersOperations +from ._configuration_operations import ConfigurationOperations +from ._namespaces_operations import NamespacesOperations +from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations +from ._private_link_resources_operations import PrivateLinkResourcesOperations +from ._operations import Operations +from ._event_hubs_operations import EventHubsOperations +from ._disaster_recovery_configs_operations import DisasterRecoveryConfigsOperations +from ._consumer_groups_operations import ConsumerGroupsOperations + +__all__ = [ + 'ClustersOperations', + 'ConfigurationOperations', + 'NamespacesOperations', + 'PrivateEndpointConnectionsOperations', + 'PrivateLinkResourcesOperations', + 'Operations', + 'EventHubsOperations', + 'DisasterRecoveryConfigsOperations', + 'ConsumerGroupsOperations', +] diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/operations/_clusters_operations.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/operations/_clusters_operations.py new file mode 100644 index 000000000000..2a1e46327dc1 --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/operations/_clusters_operations.py @@ -0,0 +1,720 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class ClustersOperations: + """ClustersOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.eventhub.v2021_06_01_preview.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. 
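+
+    Illustrative usage (a sketch only; ``client``, the resource group and
+    cluster names, and ``cluster_parameters`` below are placeholders):
+
+    .. code-block:: python
+
+        # Reach this operation group through the client attribute rather than
+        # constructing ClustersOperations directly; begin_create_or_update
+        # returns an AsyncLROPoller that can be awaited to completion.
+        poller = await client.clusters.begin_create_or_update(
+            "<resource-group>", "<cluster-name>", cluster_parameters
+        )
+        cluster = await poller.result()
+        print(cluster.id)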
+ """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def list_available_cluster_region( + self, + **kwargs: Any + ) -> "_models.AvailableClustersList": + """List the quantity of available pre-provisioned Event Hubs Clusters, indexed by Azure region. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AvailableClustersList, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.AvailableClustersList + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.AvailableClustersList"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.list_available_cluster_region.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('AvailableClustersList', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_available_cluster_region.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.EventHub/availableClusterRegions'} # type: ignore + + def list_by_subscription( + self, + **kwargs: Any + ) -> AsyncIterable["_models.ClusterListResult"]: + """Lists the available Event Hubs Clusters within an ARM resource group. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ClusterListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.eventhub.v2021_06_01_preview.models.ClusterListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ClusterListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_subscription.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('ClusterListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.EventHub/clusters'} # type: ignore + + def list_by_resource_group( + self, + resource_group_name: str, + **kwargs: Any + ) -> AsyncIterable["_models.ClusterListResult"]: + """Lists the available Event Hubs Clusters within an ARM resource group. + + :param resource_group_name: Name of the resource group within the azure subscription. 
+ :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ClusterListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.eventhub.v2021_06_01_preview.models.ClusterListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ClusterListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_resource_group.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('ClusterListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/clusters'} # type: ignore + + async def get( + self, + resource_group_name: str, + cluster_name: str, + **kwargs: Any + ) -> "_models.Cluster": + """Gets the resource description of the specified Event Hubs Cluster. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param cluster_name: The name of the Event Hubs Cluster. 
+ :type cluster_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Cluster, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.Cluster + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.Cluster"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=50, min_length=6), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Cluster', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/clusters/{clusterName}'} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + cluster_name: str, + parameters: "_models.Cluster", + **kwargs: Any + ) -> Optional["_models.Cluster"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.Cluster"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._create_or_update_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=50, min_length=6), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + 
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'Cluster') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('Cluster', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('Cluster', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/clusters/{clusterName}'} # type: ignore + + async def begin_create_or_update( + self, + resource_group_name: str, + cluster_name: str, + parameters: "_models.Cluster", + **kwargs: Any + ) -> AsyncLROPoller["_models.Cluster"]: + """Creates or updates an instance of an Event Hubs Cluster. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param cluster_name: The name of the Event Hubs Cluster. + :type cluster_name: str + :param parameters: Parameters for creating a eventhub cluster resource. + :type parameters: ~azure.mgmt.eventhub.v2021_06_01_preview.models.Cluster + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either Cluster or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.eventhub.v2021_06_01_preview.models.Cluster] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Cluster"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + cluster_name=cluster_name, + parameters=parameters, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('Cluster', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=50, min_length=6), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/clusters/{clusterName}'} # type: ignore + + async def _update_initial( + self, + resource_group_name: str, + cluster_name: str, + parameters: "_models.Cluster", + **kwargs: Any + ) -> Optional["_models.Cluster"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.Cluster"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._update_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=50, min_length=6), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 
self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'Cluster') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('Cluster', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('Cluster', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/clusters/{clusterName}'} # type: ignore + + async def begin_update( + self, + resource_group_name: str, + cluster_name: str, + parameters: "_models.Cluster", + **kwargs: Any + ) -> AsyncLROPoller["_models.Cluster"]: + """Modifies mutable properties on the Event Hubs Cluster. This operation is idempotent. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param cluster_name: The name of the Event Hubs Cluster. + :type cluster_name: str + :param parameters: The properties of the Event Hubs Cluster which should be updated. + :type parameters: ~azure.mgmt.eventhub.v2021_06_01_preview.models.Cluster + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either Cluster or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.eventhub.v2021_06_01_preview.models.Cluster] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Cluster"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._update_initial( + resource_group_name=resource_group_name, + cluster_name=cluster_name, + parameters=parameters, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('Cluster', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=50, min_length=6), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/clusters/{clusterName}'} # type: ignore + + async def _delete_initial( + self, + resource_group_name: str, + cluster_name: str, + **kwargs: Any + ) -> None: + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self._delete_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=50, min_length=6), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if 
response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/clusters/{clusterName}'} # type: ignore + + async def begin_delete( + self, + resource_group_name: str, + cluster_name: str, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Deletes an existing Event Hubs Cluster. This operation is idempotent. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param cluster_name: The name of the Event Hubs Cluster. + :type cluster_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + cluster_name=cluster_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=50, min_length=6), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/clusters/{clusterName}'} # type: ignore + + async def list_namespaces( + self, + resource_group_name: str, + cluster_name: str, + **kwargs: Any 
+ ) -> "_models.EHNamespaceIdListResult": + """List all Event Hubs Namespace IDs in an Event Hubs Dedicated Cluster. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param cluster_name: The name of the Event Hubs Cluster. + :type cluster_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EHNamespaceIdListResult, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.EHNamespaceIdListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.EHNamespaceIdListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.list_namespaces.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=50, min_length=6), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('EHNamespaceIdListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_namespaces.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/clusters/{clusterName}/namespaces'} # type: ignore diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/operations/_configuration_operations.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/operations/_configuration_operations.py new file mode 100644 index 000000000000..376ff8ea4f20 --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/operations/_configuration_operations.py @@ -0,0 +1,174 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class ConfigurationOperations: + """ConfigurationOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.eventhub.v2021_06_01_preview.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def patch( + self, + resource_group_name: str, + cluster_name: str, + parameters: "_models.ClusterQuotaConfigurationProperties", + **kwargs: Any + ) -> Optional["_models.ClusterQuotaConfigurationProperties"]: + """Replace all specified Event Hubs Cluster settings with those contained in the request body. + Leaves the settings not specified in the request body unmodified. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param cluster_name: The name of the Event Hubs Cluster. + :type cluster_name: str + :param parameters: Parameters for creating an Event Hubs Cluster resource. 
+ :type parameters: ~azure.mgmt.eventhub.v2021_06_01_preview.models.ClusterQuotaConfigurationProperties + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ClusterQuotaConfigurationProperties, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.ClusterQuotaConfigurationProperties or None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ClusterQuotaConfigurationProperties"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.patch.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=50, min_length=6), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'ClusterQuotaConfigurationProperties') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('ClusterQuotaConfigurationProperties', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('ClusterQuotaConfigurationProperties', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + patch.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/clusters/{clusterName}/quotaConfiguration/default'} # type: ignore + + async def get( + self, + resource_group_name: str, + cluster_name: str, + **kwargs: Any + ) -> "_models.ClusterQuotaConfigurationProperties": + """Get all Event Hubs Cluster settings - a collection of key/value pairs which represent the + quotas and settings imposed on the cluster. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param cluster_name: The name of the Event Hubs Cluster. 
+ :type cluster_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ClusterQuotaConfigurationProperties, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.ClusterQuotaConfigurationProperties + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ClusterQuotaConfigurationProperties"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=50, min_length=6), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ClusterQuotaConfigurationProperties', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/clusters/{clusterName}/quotaConfiguration/default'} # type: ignore diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/operations/_consumer_groups_operations.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/operations/_consumer_groups_operations.py new file mode 100644 index 000000000000..ea56378f586c --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/operations/_consumer_groups_operations.py @@ -0,0 +1,340 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class ConsumerGroupsOperations: + """ConsumerGroupsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.eventhub.v2021_06_01_preview.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def create_or_update( + self, + resource_group_name: str, + namespace_name: str, + event_hub_name: str, + consumer_group_name: str, + parameters: "_models.ConsumerGroup", + **kwargs: Any + ) -> "_models.ConsumerGroup": + """Creates or updates an Event Hubs consumer group as a nested resource within a Namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param event_hub_name: The Event Hub name. + :type event_hub_name: str + :param consumer_group_name: The consumer group name. + :type consumer_group_name: str + :param parameters: Parameters supplied to create or update a consumer group resource. 
+ :type parameters: ~azure.mgmt.eventhub.v2021_06_01_preview.models.ConsumerGroup + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ConsumerGroup, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.ConsumerGroup + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ConsumerGroup"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'eventHubName': self._serialize.url("event_hub_name", event_hub_name, 'str', max_length=256, min_length=1), + 'consumerGroupName': self._serialize.url("consumer_group_name", consumer_group_name, 'str', max_length=50, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'ConsumerGroup') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ConsumerGroup', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/eventhubs/{eventHubName}/consumergroups/{consumerGroupName}'} # type: ignore + + async def delete( + self, + resource_group_name: str, + namespace_name: str, + event_hub_name: str, + consumer_group_name: str, + **kwargs: Any + ) -> None: + """Deletes a consumer group from the specified Event Hub and resource group. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param event_hub_name: The Event Hub name. + :type event_hub_name: str + :param consumer_group_name: The consumer group name. 
+ :type consumer_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'eventHubName': self._serialize.url("event_hub_name", event_hub_name, 'str', max_length=256, min_length=1), + 'consumerGroupName': self._serialize.url("consumer_group_name", consumer_group_name, 'str', max_length=50, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/eventhubs/{eventHubName}/consumergroups/{consumerGroupName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + namespace_name: str, + event_hub_name: str, + consumer_group_name: str, + **kwargs: Any + ) -> "_models.ConsumerGroup": + """Gets a description for the specified consumer group. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param event_hub_name: The Event Hub name. + :type event_hub_name: str + :param consumer_group_name: The consumer group name. 
+ :type consumer_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ConsumerGroup, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.ConsumerGroup + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ConsumerGroup"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'eventHubName': self._serialize.url("event_hub_name", event_hub_name, 'str', max_length=256, min_length=1), + 'consumerGroupName': self._serialize.url("consumer_group_name", consumer_group_name, 'str', max_length=50, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ConsumerGroup', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/eventhubs/{eventHubName}/consumergroups/{consumerGroupName}'} # type: ignore + + def list_by_event_hub( + self, + resource_group_name: str, + namespace_name: str, + event_hub_name: str, + skip: Optional[int] = None, + top: Optional[int] = None, + **kwargs: Any + ) -> AsyncIterable["_models.ConsumerGroupListResult"]: + """Gets all the consumer groups in a Namespace. An empty feed is returned if no consumer group + exists in the Namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param event_hub_name: The Event Hub name. + :type event_hub_name: str + :param skip: Skip is only used if a previous operation returned a partial result. If a previous + response contains a nextLink element, the value of the nextLink element will include a skip + parameter that specifies a starting point to use for subsequent calls. 
+ :type skip: int + :param top: May be used to limit the number of results to the most recent N usageDetails. + :type top: int + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ConsumerGroupListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.eventhub.v2021_06_01_preview.models.ConsumerGroupListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ConsumerGroupListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_event_hub.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'eventHubName': self._serialize.url("event_hub_name", event_hub_name, 'str', max_length=256, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', maximum=1000, minimum=0) + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', maximum=1000, minimum=1) + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('ConsumerGroupListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_event_hub.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/eventhubs/{eventHubName}/consumergroups'} # type: ignore diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/operations/_disaster_recovery_configs_operations.py 
b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/operations/_disaster_recovery_configs_operations.py new file mode 100644 index 000000000000..d6e7c63e233c --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/operations/_disaster_recovery_configs_operations.py @@ -0,0 +1,714 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class DisasterRecoveryConfigsOperations: + """DisasterRecoveryConfigsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.eventhub.v2021_06_01_preview.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def check_name_availability( + self, + resource_group_name: str, + namespace_name: str, + parameters: "_models.CheckNameAvailabilityParameter", + **kwargs: Any + ) -> "_models.CheckNameAvailabilityResult": + """Check the give Namespace name availability. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param parameters: Parameters to check availability of the given Alias name. 
+ :type parameters: ~azure.mgmt.eventhub.v2021_06_01_preview.models.CheckNameAvailabilityParameter + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CheckNameAvailabilityResult, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.CheckNameAvailabilityResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.CheckNameAvailabilityResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.check_name_availability.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'CheckNameAvailabilityParameter') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('CheckNameAvailabilityResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + check_name_availability.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/disasterRecoveryConfigs/checkNameAvailability'} # type: ignore + + def list( + self, + resource_group_name: str, + namespace_name: str, + **kwargs: Any + ) -> AsyncIterable["_models.ArmDisasterRecoveryListResult"]: + """Gets all Alias(Disaster Recovery configurations). + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. 
+ :type namespace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ArmDisasterRecoveryListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.eventhub.v2021_06_01_preview.models.ArmDisasterRecoveryListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ArmDisasterRecoveryListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('ArmDisasterRecoveryListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/disasterRecoveryConfigs'} # type: ignore + + async def create_or_update( + self, + resource_group_name: str, + namespace_name: str, + alias: str, + parameters: "_models.ArmDisasterRecovery", + **kwargs: Any + ) -> Optional["_models.ArmDisasterRecovery"]: + """Creates or updates a new Alias(Disaster Recovery configuration). + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param alias: The Disaster Recovery configuration name. 
+ :type alias: str + :param parameters: Parameters required to create an Alias(Disaster Recovery configuration). + :type parameters: ~azure.mgmt.eventhub.v2021_06_01_preview.models.ArmDisasterRecovery + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ArmDisasterRecovery, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.ArmDisasterRecovery or None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ArmDisasterRecovery"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'alias': self._serialize.url("alias", alias, 'str', max_length=50, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'ArmDisasterRecovery') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('ArmDisasterRecovery', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/disasterRecoveryConfigs/{alias}'} # type: ignore + + async def delete( + self, + resource_group_name: str, + namespace_name: str, + alias: str, + **kwargs: Any + ) -> None: + """Deletes an Alias(Disaster Recovery configuration). + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param alias: The Disaster Recovery configuration name. 
+ :type alias: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'alias': self._serialize.url("alias", alias, 'str', max_length=50, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/disasterRecoveryConfigs/{alias}'} # type: ignore + + async def get( + self, + resource_group_name: str, + namespace_name: str, + alias: str, + **kwargs: Any + ) -> "_models.ArmDisasterRecovery": + """Retrieves Alias(Disaster Recovery configuration) for primary or secondary namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param alias: The Disaster Recovery configuration name. 
+ :type alias: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ArmDisasterRecovery, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.ArmDisasterRecovery + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ArmDisasterRecovery"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'alias': self._serialize.url("alias", alias, 'str', max_length=50, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ArmDisasterRecovery', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/disasterRecoveryConfigs/{alias}'} # type: ignore + + async def break_pairing( + self, + resource_group_name: str, + namespace_name: str, + alias: str, + **kwargs: Any + ) -> None: + """This operation disables the Disaster Recovery and stops replicating changes from primary to + secondary namespaces. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param alias: The Disaster Recovery configuration name. 
+ :type alias: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.break_pairing.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'alias': self._serialize.url("alias", alias, 'str', max_length=50, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + break_pairing.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/disasterRecoveryConfigs/{alias}/breakPairing'} # type: ignore + + async def fail_over( + self, + resource_group_name: str, + namespace_name: str, + alias: str, + **kwargs: Any + ) -> None: + """Invokes GEO DR failover and reconfigure the alias to point to the secondary namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param alias: The Disaster Recovery configuration name. 
+ :type alias: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.fail_over.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'alias': self._serialize.url("alias", alias, 'str', max_length=50, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + fail_over.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/disasterRecoveryConfigs/{alias}/failover'} # type: ignore + + def list_authorization_rules( + self, + resource_group_name: str, + namespace_name: str, + alias: str, + **kwargs: Any + ) -> AsyncIterable["_models.AuthorizationRuleListResult"]: + """Gets a list of authorization rules for a Namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param alias: The Disaster Recovery configuration name. 
+ :type alias: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either AuthorizationRuleListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.eventhub.v2021_06_01_preview.models.AuthorizationRuleListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.AuthorizationRuleListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_authorization_rules.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'alias': self._serialize.url("alias", alias, 'str', max_length=50, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('AuthorizationRuleListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_authorization_rules.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/disasterRecoveryConfigs/{alias}/authorizationRules'} # type: ignore + + async def get_authorization_rule( + self, + resource_group_name: str, + namespace_name: str, + alias: str, + authorization_rule_name: str, + **kwargs: Any + ) -> "_models.AuthorizationRule": + """Gets an AuthorizationRule for a Namespace by rule name. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param alias: The Disaster Recovery configuration name. 
+ :type alias: str + :param authorization_rule_name: The authorization rule name. + :type authorization_rule_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AuthorizationRule, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.AuthorizationRule + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.AuthorizationRule"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get_authorization_rule.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'alias': self._serialize.url("alias", alias, 'str', max_length=50, min_length=1), + 'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str', min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('AuthorizationRule', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_authorization_rule.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/disasterRecoveryConfigs/{alias}/authorizationRules/{authorizationRuleName}'} # type: ignore + + async def list_keys( + self, + resource_group_name: str, + namespace_name: str, + alias: str, + authorization_rule_name: str, + **kwargs: Any + ) -> "_models.AccessKeys": + """Gets the primary and secondary connection strings for the Namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param alias: The Disaster Recovery configuration name. + :type alias: str + :param authorization_rule_name: The authorization rule name. 
+ :type authorization_rule_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AccessKeys, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.AccessKeys + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.AccessKeys"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.list_keys.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'alias': self._serialize.url("alias", alias, 'str', max_length=50, min_length=1), + 'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str', min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('AccessKeys', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/disasterRecoveryConfigs/{alias}/authorizationRules/{authorizationRuleName}/listKeys'} # type: ignore diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/operations/_event_hubs_operations.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/operations/_event_hubs_operations.py new file mode 100644 index 000000000000..673871fa8c51 --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/operations/_event_hubs_operations.py @@ -0,0 +1,752 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class EventHubsOperations: + """EventHubsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.eventhub.v2021_06_01_preview.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_namespace( + self, + resource_group_name: str, + namespace_name: str, + skip: Optional[int] = None, + top: Optional[int] = None, + **kwargs: Any + ) -> AsyncIterable["_models.EventHubListResult"]: + """Gets all the Event Hubs in a Namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param skip: Skip is only used if a previous operation returned a partial result. If a previous + response contains a nextLink element, the value of the nextLink element will include a skip + parameter that specifies a starting point to use for subsequent calls. + :type skip: int + :param top: May be used to limit the number of results to the most recent N usageDetails. 
+ :type top: int + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either EventHubListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.eventhub.v2021_06_01_preview.models.EventHubListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.EventHubListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_namespace.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', maximum=1000, minimum=0) + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', maximum=1000, minimum=1) + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('EventHubListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_namespace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/eventhubs'} # type: ignore + + async def create_or_update( + self, + resource_group_name: str, + namespace_name: str, + event_hub_name: str, + parameters: "_models.Eventhub", + **kwargs: Any + ) -> "_models.Eventhub": + """Creates or updates a new Event Hub as a nested resource within a Namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. 
+ :type namespace_name: str + :param event_hub_name: The Event Hub name. + :type event_hub_name: str + :param parameters: Parameters supplied to create an Event Hub resource. + :type parameters: ~azure.mgmt.eventhub.v2021_06_01_preview.models.Eventhub + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Eventhub, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.Eventhub + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.Eventhub"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'eventHubName': self._serialize.url("event_hub_name", event_hub_name, 'str', max_length=256, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'Eventhub') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Eventhub', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/eventhubs/{eventHubName}'} # type: ignore + + async def delete( + self, + resource_group_name: str, + namespace_name: str, + event_hub_name: str, + **kwargs: Any + ) -> None: + """Deletes an Event Hub from the specified Namespace and resource group. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param event_hub_name: The Event Hub name. 
+ :type event_hub_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'eventHubName': self._serialize.url("event_hub_name", event_hub_name, 'str', max_length=256, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/eventhubs/{eventHubName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + namespace_name: str, + event_hub_name: str, + **kwargs: Any + ) -> "_models.Eventhub": + """Gets an Event Hubs description for the specified Event Hub. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param event_hub_name: The Event Hub name. 
+ :type event_hub_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Eventhub, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.Eventhub + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.Eventhub"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'eventHubName': self._serialize.url("event_hub_name", event_hub_name, 'str', max_length=256, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Eventhub', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/eventhubs/{eventHubName}'} # type: ignore + + def list_authorization_rules( + self, + resource_group_name: str, + namespace_name: str, + event_hub_name: str, + **kwargs: Any + ) -> AsyncIterable["_models.AuthorizationRuleListResult"]: + """Gets the authorization rules for an Event Hub. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param event_hub_name: The Event Hub name. 
+ :type event_hub_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either AuthorizationRuleListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.eventhub.v2021_06_01_preview.models.AuthorizationRuleListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.AuthorizationRuleListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_authorization_rules.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'eventHubName': self._serialize.url("event_hub_name", event_hub_name, 'str', max_length=256, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('AuthorizationRuleListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_authorization_rules.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/eventhubs/{eventHubName}/authorizationRules'} # type: ignore + + async def create_or_update_authorization_rule( + self, + resource_group_name: str, + namespace_name: str, + event_hub_name: str, + authorization_rule_name: str, + parameters: "_models.AuthorizationRule", + **kwargs: Any + ) -> "_models.AuthorizationRule": + """Creates or updates an AuthorizationRule for the specified Event Hub. Creation/update of the + AuthorizationRule will take a few seconds to take effect. 
+ + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param event_hub_name: The Event Hub name. + :type event_hub_name: str + :param authorization_rule_name: The authorization rule name. + :type authorization_rule_name: str + :param parameters: The shared access AuthorizationRule. + :type parameters: ~azure.mgmt.eventhub.v2021_06_01_preview.models.AuthorizationRule + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AuthorizationRule, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.AuthorizationRule + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.AuthorizationRule"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update_authorization_rule.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'eventHubName': self._serialize.url("event_hub_name", event_hub_name, 'str', max_length=256, min_length=1), + 'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str', min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'AuthorizationRule') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('AuthorizationRule', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update_authorization_rule.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/eventhubs/{eventHubName}/authorizationRules/{authorizationRuleName}'} # type: ignore + + async def get_authorization_rule( + self, + resource_group_name: str, + 
namespace_name: str, + event_hub_name: str, + authorization_rule_name: str, + **kwargs: Any + ) -> "_models.AuthorizationRule": + """Gets an AuthorizationRule for an Event Hub by rule name. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param event_hub_name: The Event Hub name. + :type event_hub_name: str + :param authorization_rule_name: The authorization rule name. + :type authorization_rule_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AuthorizationRule, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.AuthorizationRule + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.AuthorizationRule"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get_authorization_rule.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'eventHubName': self._serialize.url("event_hub_name", event_hub_name, 'str', max_length=256, min_length=1), + 'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str', min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('AuthorizationRule', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_authorization_rule.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/eventhubs/{eventHubName}/authorizationRules/{authorizationRuleName}'} # type: ignore + + async def delete_authorization_rule( + self, + resource_group_name: str, + namespace_name: str, + event_hub_name: str, + authorization_rule_name: str, + **kwargs: Any + ) -> None: + """Deletes an Event Hub AuthorizationRule. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. 
+ :type namespace_name: str + :param event_hub_name: The Event Hub name. + :type event_hub_name: str + :param authorization_rule_name: The authorization rule name. + :type authorization_rule_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.delete_authorization_rule.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'eventHubName': self._serialize.url("event_hub_name", event_hub_name, 'str', max_length=256, min_length=1), + 'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str', min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete_authorization_rule.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/eventhubs/{eventHubName}/authorizationRules/{authorizationRuleName}'} # type: ignore + + async def list_keys( + self, + resource_group_name: str, + namespace_name: str, + event_hub_name: str, + authorization_rule_name: str, + **kwargs: Any + ) -> "_models.AccessKeys": + """Gets the ACS and SAS connection strings for the Event Hub. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param event_hub_name: The Event Hub name. + :type event_hub_name: str + :param authorization_rule_name: The authorization rule name. 
+ :type authorization_rule_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AccessKeys, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.AccessKeys + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.AccessKeys"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.list_keys.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'eventHubName': self._serialize.url("event_hub_name", event_hub_name, 'str', max_length=256, min_length=1), + 'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str', min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('AccessKeys', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/eventhubs/{eventHubName}/authorizationRules/{authorizationRuleName}/listKeys'} # type: ignore + + async def regenerate_keys( + self, + resource_group_name: str, + namespace_name: str, + event_hub_name: str, + authorization_rule_name: str, + parameters: "_models.RegenerateAccessKeyParameters", + **kwargs: Any + ) -> "_models.AccessKeys": + """Regenerates the ACS and SAS connection strings for the Event Hub. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param event_hub_name: The Event Hub name. + :type event_hub_name: str + :param authorization_rule_name: The authorization rule name. + :type authorization_rule_name: str + :param parameters: Parameters supplied to regenerate the AuthorizationRule Keys + (PrimaryKey/SecondaryKey). 
+ :type parameters: ~azure.mgmt.eventhub.v2021_06_01_preview.models.RegenerateAccessKeyParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AccessKeys, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.AccessKeys + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.AccessKeys"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.regenerate_keys.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'eventHubName': self._serialize.url("event_hub_name", event_hub_name, 'str', max_length=256, min_length=1), + 'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str', min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'RegenerateAccessKeyParameters') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('AccessKeys', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + regenerate_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/eventhubs/{eventHubName}/authorizationRules/{authorizationRuleName}/regenerateKeys'} # type: ignore diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/operations/_namespaces_operations.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/operations/_namespaces_operations.py new file mode 100644 index 000000000000..12510f5a7785 --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/operations/_namespaces_operations.py @@ -0,0 +1,1143 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class NamespacesOperations: + """NamespacesOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.eventhub.v2021_06_01_preview.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs: Any + ) -> AsyncIterable["_models.EHNamespaceListResult"]: + """Lists all the available Namespaces within a subscription, irrespective of the resource groups. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either EHNamespaceListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.eventhub.v2021_06_01_preview.models.EHNamespaceListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.EHNamespaceListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('EHNamespaceListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.EventHub/namespaces'} # type: ignore + + def list_by_resource_group( + self, + resource_group_name: str, + **kwargs: Any + ) -> AsyncIterable["_models.EHNamespaceListResult"]: + """Lists the available Namespaces within a resource group. + + :param resource_group_name: Name of the resource group within the azure subscription. 
+ :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either EHNamespaceListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.eventhub.v2021_06_01_preview.models.EHNamespaceListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.EHNamespaceListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_resource_group.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('EHNamespaceListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces'} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + namespace_name: str, + parameters: "_models.EHNamespace", + **kwargs: Any + ) -> Optional["_models.EHNamespace"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.EHNamespace"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._create_or_update_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, 
min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'EHNamespace') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('EHNamespace', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('EHNamespace', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}'} # type: ignore + + async def begin_create_or_update( + self, + resource_group_name: str, + namespace_name: str, + parameters: "_models.EHNamespace", + **kwargs: Any + ) -> AsyncLROPoller["_models.EHNamespace"]: + """Creates or updates a namespace. Once created, this namespace's resource manifest is immutable. + This operation is idempotent. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param parameters: Parameters for creating a namespace resource. + :type parameters: ~azure.mgmt.eventhub.v2021_06_01_preview.models.EHNamespace + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either EHNamespace or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.eventhub.v2021_06_01_preview.models.EHNamespace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.EHNamespace"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + namespace_name=namespace_name, + parameters=parameters, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('EHNamespace', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}'} # type: ignore + + async def _delete_initial( + self, + resource_group_name: str, + namespace_name: str, + **kwargs: Any + ) -> None: + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self._delete_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + 
response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}'} # type: ignore + + async def begin_delete( + self, + resource_group_name: str, + namespace_name: str, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Deletes an existing namespace. This operation also removes all associated resources under the + namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + namespace_name=namespace_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}'} # type: ignore + + async 
def get( + self, + resource_group_name: str, + namespace_name: str, + **kwargs: Any + ) -> "_models.EHNamespace": + """Gets the description of the specified namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EHNamespace, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.EHNamespace + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.EHNamespace"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('EHNamespace', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}'} # type: ignore + + async def update( + self, + resource_group_name: str, + namespace_name: str, + parameters: "_models.EHNamespace", + **kwargs: Any + ) -> Optional["_models.EHNamespace"]: + """Creates or updates a namespace. Once created, this namespace's resource manifest is immutable. + This operation is idempotent. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param parameters: Parameters for updating a namespace resource. 
+ :type parameters: ~azure.mgmt.eventhub.v2021_06_01_preview.models.EHNamespace + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EHNamespace, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.EHNamespace or None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.EHNamespace"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'EHNamespace') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('EHNamespace', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('EHNamespace', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}'} # type: ignore + + async def create_or_update_network_rule_set( + self, + resource_group_name: str, + namespace_name: str, + parameters: "_models.NetworkRuleSet", + **kwargs: Any + ) -> "_models.NetworkRuleSet": + """Create or update NetworkRuleSet for a Namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param parameters: The Namespace IpFilterRule. 
+ :type parameters: ~azure.mgmt.eventhub.v2021_06_01_preview.models.NetworkRuleSet + :keyword callable cls: A custom type or function that will be passed the direct response + :return: NetworkRuleSet, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.NetworkRuleSet + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkRuleSet"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update_network_rule_set.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'NetworkRuleSet') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('NetworkRuleSet', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update_network_rule_set.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/networkRuleSets/default'} # type: ignore + + async def get_network_rule_set( + self, + resource_group_name: str, + namespace_name: str, + **kwargs: Any + ) -> "_models.NetworkRuleSet": + """Gets NetworkRuleSet for a Namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. 
+ :type namespace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: NetworkRuleSet, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.NetworkRuleSet + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkRuleSet"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get_network_rule_set.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('NetworkRuleSet', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_network_rule_set.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/networkRuleSets/default'} # type: ignore + + def list_authorization_rules( + self, + resource_group_name: str, + namespace_name: str, + **kwargs: Any + ) -> AsyncIterable["_models.AuthorizationRuleListResult"]: + """Gets a list of authorization rules for a Namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. 
+ :type namespace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either AuthorizationRuleListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.eventhub.v2021_06_01_preview.models.AuthorizationRuleListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.AuthorizationRuleListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_authorization_rules.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('AuthorizationRuleListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_authorization_rules.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/authorizationRules'} # type: ignore + + async def create_or_update_authorization_rule( + self, + resource_group_name: str, + namespace_name: str, + authorization_rule_name: str, + parameters: "_models.AuthorizationRule", + **kwargs: Any + ) -> "_models.AuthorizationRule": + """Creates or updates an AuthorizationRule for a Namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param authorization_rule_name: The authorization rule name. 
+ :type authorization_rule_name: str + :param parameters: The shared access AuthorizationRule. + :type parameters: ~azure.mgmt.eventhub.v2021_06_01_preview.models.AuthorizationRule + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AuthorizationRule, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.AuthorizationRule + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.AuthorizationRule"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update_authorization_rule.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str', min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'AuthorizationRule') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('AuthorizationRule', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update_authorization_rule.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/authorizationRules/{authorizationRuleName}'} # type: ignore + + async def delete_authorization_rule( + self, + resource_group_name: str, + namespace_name: str, + authorization_rule_name: str, + **kwargs: Any + ) -> None: + """Deletes an AuthorizationRule for a Namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param authorization_rule_name: The authorization rule name. 
+ :type authorization_rule_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.delete_authorization_rule.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str', min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete_authorization_rule.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/authorizationRules/{authorizationRuleName}'} # type: ignore + + async def get_authorization_rule( + self, + resource_group_name: str, + namespace_name: str, + authorization_rule_name: str, + **kwargs: Any + ) -> "_models.AuthorizationRule": + """Gets an AuthorizationRule for a Namespace by rule name. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param authorization_rule_name: The authorization rule name. 
+ :type authorization_rule_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AuthorizationRule, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.AuthorizationRule + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.AuthorizationRule"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get_authorization_rule.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str', min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('AuthorizationRule', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_authorization_rule.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/authorizationRules/{authorizationRuleName}'} # type: ignore + + async def list_keys( + self, + resource_group_name: str, + namespace_name: str, + authorization_rule_name: str, + **kwargs: Any + ) -> "_models.AccessKeys": + """Gets the primary and secondary connection strings for the Namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param authorization_rule_name: The authorization rule name. 
+ :type authorization_rule_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AccessKeys, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.AccessKeys + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.AccessKeys"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.list_keys.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str', min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('AccessKeys', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/authorizationRules/{authorizationRuleName}/listKeys'} # type: ignore + + async def regenerate_keys( + self, + resource_group_name: str, + namespace_name: str, + authorization_rule_name: str, + parameters: "_models.RegenerateAccessKeyParameters", + **kwargs: Any + ) -> "_models.AccessKeys": + """Regenerates the primary or secondary connection strings for the specified Namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param authorization_rule_name: The authorization rule name. + :type authorization_rule_name: str + :param parameters: Parameters required to regenerate the connection string. 
+ :type parameters: ~azure.mgmt.eventhub.v2021_06_01_preview.models.RegenerateAccessKeyParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AccessKeys, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.AccessKeys + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.AccessKeys"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.regenerate_keys.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str', min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'RegenerateAccessKeyParameters') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('AccessKeys', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + regenerate_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/authorizationRules/{authorizationRuleName}/regenerateKeys'} # type: ignore + + async def check_name_availability( + self, + parameters: "_models.CheckNameAvailabilityParameter", + **kwargs: Any + ) -> "_models.CheckNameAvailabilityResult": + """Check the given Namespace name availability. + + :param parameters: Parameters to check availability of the given Namespace name.
+ :type parameters: ~azure.mgmt.eventhub.v2021_06_01_preview.models.CheckNameAvailabilityParameter + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CheckNameAvailabilityResult, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.CheckNameAvailabilityResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.CheckNameAvailabilityResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.check_name_availability.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'CheckNameAvailabilityParameter') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('CheckNameAvailabilityResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + check_name_availability.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.EventHub/checkNameAvailability'} # type: ignore diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/operations/_operations.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/operations/_operations.py new file mode 100644 index 000000000000..b25217f8e540 --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/operations/_operations.py @@ -0,0 +1,105 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
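The generated operations above all follow the same pattern: build the URL from the method's `metadata['url']`, serialize the path, query, header, and body parameters, run the request through the async pipeline, and either raise `HttpResponseError` (wrapping the ARM `ErrorResponse`) or deserialize the response model. The snippet below is only a rough usage sketch: it assumes the version-specific async client can be imported from `azure.mgmt.eventhub.v2021_06_01_preview.aio` and that the methods above are exposed on its `namespaces` operation group; the subscription, resource group, namespace, and rule names are placeholders.

```py
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.eventhub.v2021_06_01_preview.aio import EventHubManagementClient
from azure.mgmt.eventhub.v2021_06_01_preview.models import (
    CheckNameAvailabilityParameter,
    RegenerateAccessKeyParameters,
)


async def main() -> None:
    # Placeholder values -- substitute your own subscription, group, namespace, and rule.
    subscription_id = "<subscription-id>"
    resource_group = "<resource-group>"
    namespace = "<namespace>"
    rule = "RootManageSharedAccessKey"

    async with DefaultAzureCredential() as credential:
        async with EventHubManagementClient(credential, subscription_id) as client:
            # Check whether the namespace name is still available.
            availability = await client.namespaces.check_name_availability(
                CheckNameAvailabilityParameter(name=namespace)
            )
            print(availability.name_available, availability.reason)

            # Read the current connection strings for an authorization rule ...
            keys = await client.namespaces.list_keys(resource_group, namespace, rule)
            print(keys.primary_connection_string)

            # ... and roll the primary key.
            keys = await client.namespaces.regenerate_keys(
                resource_group,
                namespace,
                rule,
                RegenerateAccessKeyParameters(key_type="PrimaryKey"),
            )


asyncio.run(main())
```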
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class Operations: + """Operations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.eventhub.v2021_06_01_preview.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs: Any + ) -> AsyncIterable["_models.OperationListResult"]: + """Lists all of the available Event Hub REST API operations. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either OperationListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.eventhub.v2021_06_01_preview.models.OperationListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('OperationListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = 
self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/providers/Microsoft.EventHub/operations'} # type: ignore diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/operations/_private_endpoint_connections_operations.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/operations/_private_endpoint_connections_operations.py new file mode 100644 index 000000000000..254f850de138 --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/operations/_private_endpoint_connections_operations.py @@ -0,0 +1,375 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class PrivateEndpointConnectionsOperations: + """PrivateEndpointConnectionsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.eventhub.v2021_06_01_preview.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + resource_group_name: str, + namespace_name: str, + **kwargs: Any + ) -> AsyncIterable["_models.PrivateEndpointConnectionListResult"]: + """Gets the available PrivateEndpointConnections within a namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. 
+ :type namespace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either PrivateEndpointConnectionListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.eventhub.v2021_06_01_preview.models.PrivateEndpointConnectionListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnectionListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('PrivateEndpointConnectionListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/privateEndpointConnections'} # type: ignore + + async def create_or_update( + self, + resource_group_name: str, + namespace_name: str, + private_endpoint_connection_name: str, + parameters: "_models.PrivateEndpointConnection", + **kwargs: Any + ) -> "_models.PrivateEndpointConnection": + """Creates or updates PrivateEndpointConnections of service namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param private_endpoint_connection_name: The PrivateEndpointConnection name. 
+ :type private_endpoint_connection_name: str + :param parameters: Parameters supplied to update Status of PrivateEndPoint Connection to + namespace resource. + :type parameters: ~azure.mgmt.eventhub.v2021_06_01_preview.models.PrivateEndpointConnection + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnection, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.PrivateEndpointConnection + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'PrivateEndpointConnection') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + async def _delete_initial( + self, + resource_group_name: str, + namespace_name: str, + private_endpoint_connection_name: str, + **kwargs: Any + ) -> None: + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: 
ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self._delete_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + async def begin_delete( + self, + resource_group_name: str, + namespace_name: str, + private_endpoint_connection_name: str, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Deletes an existing namespace. This operation also removes all associated resources under the + namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param private_endpoint_connection_name: The PrivateEndpointConnection name. + :type private_endpoint_connection_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + namespace_name=namespace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + namespace_name: str, + private_endpoint_connection_name: str, + **kwargs: Any + ) -> "_models.PrivateEndpointConnection": + """Gets a description for the specified Private Endpoint Connection name. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param private_endpoint_connection_name: The PrivateEndpointConnection name. 
+ :type private_endpoint_connection_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnection, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.PrivateEndpointConnection + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/operations/_private_link_resources_operations.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/operations/_private_link_resources_operations.py new file mode 100644 index 000000000000..ad20793fc87f --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/aio/operations/_private_link_resources_operations.py @@ -0,0 +1,100 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
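For the private endpoint connection group above, `list` returns an `AsyncItemPaged` that follows `nextLink` transparently, while `begin_delete` wraps `_delete_initial` in an `AsyncLROPoller` driven by `AsyncARMPolling`. A minimal sketch of consuming both, under the same assumptions as the earlier example (version-specific async client, operation group attached as `private_endpoint_connections`, placeholder names):

```py
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.eventhub.v2021_06_01_preview.aio import EventHubManagementClient


async def main() -> None:
    subscription_id = "<subscription-id>"
    resource_group = "<resource-group>"
    namespace = "<namespace>"

    async with DefaultAzureCredential() as credential:
        async with EventHubManagementClient(credential, subscription_id) as client:
            # The pager is consumed with `async for`; pages are fetched lazily.
            async for connection in client.private_endpoint_connections.list(
                resource_group, namespace
            ):
                print(connection.name, connection.provisioning_state)

            # begin_delete returns an AsyncLROPoller; awaiting result() waits for
            # the long-running delete to finish (it returns None for this operation).
            poller = await client.private_endpoint_connections.begin_delete(
                resource_group, namespace, "<connection-name>"
            )
            await poller.result()


asyncio.run(main())
```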
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class PrivateLinkResourcesOperations: + """PrivateLinkResourcesOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.eventhub.v2021_06_01_preview.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def get( + self, + resource_group_name: str, + namespace_name: str, + **kwargs: Any + ) -> "_models.PrivateLinkResourcesListResult": + """Gets the lists of resources that support Private Links. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name.
+ :type namespace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateLinkResourcesListResult, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.PrivateLinkResourcesListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateLinkResourcesListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PrivateLinkResourcesListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/privateLinkResources'} # type: ignore diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/models/__init__.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/models/__init__.py new file mode 100644 index 000000000000..0a7089481bd0 --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/models/__init__.py @@ -0,0 +1,191 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
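`PrivateLinkResourcesOperations.get` is a single, unpaged call that returns the whole `PrivateLinkResourcesListResult`. A short sketch, again assuming the group is exposed on the async client as `private_link_resources` and using hypothetical resource names:

```py
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.eventhub.v2021_06_01_preview.aio import EventHubManagementClient


async def show_private_link_resources(
    subscription_id: str, resource_group: str, namespace: str
) -> None:
    async with DefaultAzureCredential() as credential:
        async with EventHubManagementClient(credential, subscription_id) as client:
            result = await client.private_link_resources.get(resource_group, namespace)
            # `value` holds the PrivateLinkResource entries; it may be None if the
            # service returns an empty body.
            for resource in result.value or []:
                print(resource.name)


asyncio.run(
    show_private_link_resources("<subscription-id>", "<resource-group>", "<namespace>")
)
```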
+# -------------------------------------------------------------------------- + +try: + from ._models_py3 import AccessKeys + from ._models_py3 import ArmDisasterRecovery + from ._models_py3 import ArmDisasterRecoveryListResult + from ._models_py3 import AuthorizationRule + from ._models_py3 import AuthorizationRuleListResult + from ._models_py3 import AvailableCluster + from ._models_py3 import AvailableClustersList + from ._models_py3 import CaptureDescription + from ._models_py3 import CheckNameAvailabilityParameter + from ._models_py3 import CheckNameAvailabilityResult + from ._models_py3 import Cluster + from ._models_py3 import ClusterListResult + from ._models_py3 import ClusterQuotaConfigurationProperties + from ._models_py3 import ClusterSku + from ._models_py3 import ConnectionState + from ._models_py3 import ConsumerGroup + from ._models_py3 import ConsumerGroupListResult + from ._models_py3 import Destination + from ._models_py3 import EHNamespace + from ._models_py3 import EHNamespaceIdContainer + from ._models_py3 import EHNamespaceIdListResult + from ._models_py3 import EHNamespaceListResult + from ._models_py3 import Encryption + from ._models_py3 import ErrorResponse + from ._models_py3 import EventHubListResult + from ._models_py3 import Eventhub + from ._models_py3 import Identity + from ._models_py3 import KeyVaultProperties + from ._models_py3 import NWRuleSetIpRules + from ._models_py3 import NWRuleSetVirtualNetworkRules + from ._models_py3 import NetworkRuleSet + from ._models_py3 import Operation + from ._models_py3 import OperationDisplay + from ._models_py3 import OperationListResult + from ._models_py3 import PrivateEndpoint + from ._models_py3 import PrivateEndpointConnection + from ._models_py3 import PrivateEndpointConnectionListResult + from ._models_py3 import PrivateLinkResource + from ._models_py3 import PrivateLinkResourcesListResult + from ._models_py3 import RegenerateAccessKeyParameters + from ._models_py3 import Resource + from ._models_py3 import Sku + from ._models_py3 import Subnet + from ._models_py3 import SystemData + from ._models_py3 import TrackedResource + from ._models_py3 import UserAssignedIdentity + from ._models_py3 import UserAssignedIdentityProperties +except (SyntaxError, ImportError): + from ._models import AccessKeys # type: ignore + from ._models import ArmDisasterRecovery # type: ignore + from ._models import ArmDisasterRecoveryListResult # type: ignore + from ._models import AuthorizationRule # type: ignore + from ._models import AuthorizationRuleListResult # type: ignore + from ._models import AvailableCluster # type: ignore + from ._models import AvailableClustersList # type: ignore + from ._models import CaptureDescription # type: ignore + from ._models import CheckNameAvailabilityParameter # type: ignore + from ._models import CheckNameAvailabilityResult # type: ignore + from ._models import Cluster # type: ignore + from ._models import ClusterListResult # type: ignore + from ._models import ClusterQuotaConfigurationProperties # type: ignore + from ._models import ClusterSku # type: ignore + from ._models import ConnectionState # type: ignore + from ._models import ConsumerGroup # type: ignore + from ._models import ConsumerGroupListResult # type: ignore + from ._models import Destination # type: ignore + from ._models import EHNamespace # type: ignore + from ._models import EHNamespaceIdContainer # type: ignore + from ._models import EHNamespaceIdListResult # type: ignore + from ._models import EHNamespaceListResult # type: 
ignore + from ._models import Encryption # type: ignore + from ._models import ErrorResponse # type: ignore + from ._models import EventHubListResult # type: ignore + from ._models import Eventhub # type: ignore + from ._models import Identity # type: ignore + from ._models import KeyVaultProperties # type: ignore + from ._models import NWRuleSetIpRules # type: ignore + from ._models import NWRuleSetVirtualNetworkRules # type: ignore + from ._models import NetworkRuleSet # type: ignore + from ._models import Operation # type: ignore + from ._models import OperationDisplay # type: ignore + from ._models import OperationListResult # type: ignore + from ._models import PrivateEndpoint # type: ignore + from ._models import PrivateEndpointConnection # type: ignore + from ._models import PrivateEndpointConnectionListResult # type: ignore + from ._models import PrivateLinkResource # type: ignore + from ._models import PrivateLinkResourcesListResult # type: ignore + from ._models import RegenerateAccessKeyParameters # type: ignore + from ._models import Resource # type: ignore + from ._models import Sku # type: ignore + from ._models import Subnet # type: ignore + from ._models import SystemData # type: ignore + from ._models import TrackedResource # type: ignore + from ._models import UserAssignedIdentity # type: ignore + from ._models import UserAssignedIdentityProperties # type: ignore + +from ._event_hub_management_client_enums import ( + AccessRights, + ClusterSkuName, + CreatedByType, + DefaultAction, + EncodingCaptureDescription, + EndPointProvisioningState, + EntityStatus, + KeyType, + ManagedServiceIdentityType, + NetworkRuleIPAction, + PrivateLinkConnectionStatus, + ProvisioningStateDR, + PublicNetworkAccessFlag, + RoleDisasterRecovery, + SkuName, + SkuTier, + UnavailableReason, +) + +__all__ = [ + 'AccessKeys', + 'ArmDisasterRecovery', + 'ArmDisasterRecoveryListResult', + 'AuthorizationRule', + 'AuthorizationRuleListResult', + 'AvailableCluster', + 'AvailableClustersList', + 'CaptureDescription', + 'CheckNameAvailabilityParameter', + 'CheckNameAvailabilityResult', + 'Cluster', + 'ClusterListResult', + 'ClusterQuotaConfigurationProperties', + 'ClusterSku', + 'ConnectionState', + 'ConsumerGroup', + 'ConsumerGroupListResult', + 'Destination', + 'EHNamespace', + 'EHNamespaceIdContainer', + 'EHNamespaceIdListResult', + 'EHNamespaceListResult', + 'Encryption', + 'ErrorResponse', + 'EventHubListResult', + 'Eventhub', + 'Identity', + 'KeyVaultProperties', + 'NWRuleSetIpRules', + 'NWRuleSetVirtualNetworkRules', + 'NetworkRuleSet', + 'Operation', + 'OperationDisplay', + 'OperationListResult', + 'PrivateEndpoint', + 'PrivateEndpointConnection', + 'PrivateEndpointConnectionListResult', + 'PrivateLinkResource', + 'PrivateLinkResourcesListResult', + 'RegenerateAccessKeyParameters', + 'Resource', + 'Sku', + 'Subnet', + 'SystemData', + 'TrackedResource', + 'UserAssignedIdentity', + 'UserAssignedIdentityProperties', + 'AccessRights', + 'ClusterSkuName', + 'CreatedByType', + 'DefaultAction', + 'EncodingCaptureDescription', + 'EndPointProvisioningState', + 'EntityStatus', + 'KeyType', + 'ManagedServiceIdentityType', + 'NetworkRuleIPAction', + 'PrivateLinkConnectionStatus', + 'ProvisioningStateDR', + 'PublicNetworkAccessFlag', + 'RoleDisasterRecovery', + 'SkuName', + 'SkuTier', + 'UnavailableReason', +] diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/models/_event_hub_management_client_enums.py 
b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/models/_event_hub_management_client_enums.py new file mode 100644 index 000000000000..bdaf61eccdfc --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/models/_event_hub_management_client_enums.py @@ -0,0 +1,171 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from enum import Enum, EnumMeta +from six import with_metaclass + +class _CaseInsensitiveEnumMeta(EnumMeta): + def __getitem__(self, name): + return super().__getitem__(name.upper()) + + def __getattr__(cls, name): + """Return the enum member matching `name` + We use __getattr__ instead of descriptors or inserting into the enum + class' __dict__ in order to support `name` and `value` being both + properties for enum members (which live in the class' __dict__) and + enum members themselves. + """ + try: + return cls._member_map_[name.upper()] + except KeyError: + raise AttributeError(name) + + +class AccessRights(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + MANAGE = "Manage" + SEND = "Send" + LISTEN = "Listen" + +class ClusterSkuName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Name of this SKU. + """ + + DEDICATED = "Dedicated" + +class CreatedByType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The type of identity that created the resource. + """ + + USER = "User" + APPLICATION = "Application" + MANAGED_IDENTITY = "ManagedIdentity" + KEY = "Key" + +class DefaultAction(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Default Action for Network Rule Set + """ + + ALLOW = "Allow" + DENY = "Deny" + +class EncodingCaptureDescription(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Enumerates the possible values for the encoding format of capture description. Note: + 'AvroDeflate' will be deprecated in New API Version + """ + + AVRO = "Avro" + AVRO_DEFLATE = "AvroDeflate" + +class EndPointProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Provisioning state of the Private Endpoint Connection. + """ + + CREATING = "Creating" + UPDATING = "Updating" + DELETING = "Deleting" + SUCCEEDED = "Succeeded" + CANCELED = "Canceled" + FAILED = "Failed" + +class EntityStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Enumerates the possible values for the status of the Event Hub. + """ + + ACTIVE = "Active" + DISABLED = "Disabled" + RESTORING = "Restoring" + SEND_DISABLED = "SendDisabled" + RECEIVE_DISABLED = "ReceiveDisabled" + CREATING = "Creating" + DELETING = "Deleting" + RENAMING = "Renaming" + UNKNOWN = "Unknown" + +class KeyType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The access key to regenerate. + """ + + PRIMARY_KEY = "PrimaryKey" + SECONDARY_KEY = "SecondaryKey" + +class ManagedServiceIdentityType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Type of managed service identity. 
+ """ + + SYSTEM_ASSIGNED = "SystemAssigned" + USER_ASSIGNED = "UserAssigned" + SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned, UserAssigned" + NONE = "None" + +class NetworkRuleIPAction(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The IP Filter Action + """ + + ALLOW = "Allow" + +class PrivateLinkConnectionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Status of the connection. + """ + + PENDING = "Pending" + APPROVED = "Approved" + REJECTED = "Rejected" + DISCONNECTED = "Disconnected" + +class ProvisioningStateDR(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Provisioning state of the Alias(Disaster Recovery configuration) - possible values 'Accepted' + or 'Succeeded' or 'Failed' + """ + + ACCEPTED = "Accepted" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + +class PublicNetworkAccessFlag(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """This determines if traffic is allowed over public network. By default it is enabled. + """ + + ENABLED = "Enabled" + DISABLED = "Disabled" + +class RoleDisasterRecovery(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """role of namespace in GEO DR - possible values 'Primary' or 'PrimaryNotReplicating' or + 'Secondary' + """ + + PRIMARY = "Primary" + PRIMARY_NOT_REPLICATING = "PrimaryNotReplicating" + SECONDARY = "Secondary" + +class SkuName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Name of this SKU. + """ + + BASIC = "Basic" + STANDARD = "Standard" + PREMIUM = "Premium" + +class SkuTier(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The billing tier of this particular SKU. + """ + + BASIC = "Basic" + STANDARD = "Standard" + PREMIUM = "Premium" + +class UnavailableReason(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Specifies the reason for the unavailability of the service. + """ + + NONE = "None" + INVALID_NAME = "InvalidName" + SUBSCRIPTION_IS_DISABLED = "SubscriptionIsDisabled" + NAME_IN_USE = "NameInUse" + NAME_IN_LOCKDOWN = "NameInLockdown" + TOO_MANY_NAMESPACE_IN_CURRENT_SUBSCRIPTION = "TooManyNamespaceInCurrentSubscription" diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/models/_models.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/models/_models.py new file mode 100644 index 000000000000..098183c8cdbb --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/models/_models.py @@ -0,0 +1,1671 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.core.exceptions import HttpResponseError +import msrest.serialization + + +class AccessKeys(msrest.serialization.Model): + """Namespace/EventHub Connection String. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar primary_connection_string: Primary connection string of the created namespace + AuthorizationRule. + :vartype primary_connection_string: str + :ivar secondary_connection_string: Secondary connection string of the created namespace + AuthorizationRule. 
+ :vartype secondary_connection_string: str + :ivar alias_primary_connection_string: Primary connection string of the alias if GEO DR is + enabled. + :vartype alias_primary_connection_string: str + :ivar alias_secondary_connection_string: Secondary connection string of the alias if GEO DR is + enabled. + :vartype alias_secondary_connection_string: str + :ivar primary_key: A base64-encoded 256-bit primary key for signing and validating the SAS + token. + :vartype primary_key: str + :ivar secondary_key: A base64-encoded 256-bit primary key for signing and validating the SAS + token. + :vartype secondary_key: str + :ivar key_name: A string that describes the AuthorizationRule. + :vartype key_name: str + """ + + _validation = { + 'primary_connection_string': {'readonly': True}, + 'secondary_connection_string': {'readonly': True}, + 'alias_primary_connection_string': {'readonly': True}, + 'alias_secondary_connection_string': {'readonly': True}, + 'primary_key': {'readonly': True}, + 'secondary_key': {'readonly': True}, + 'key_name': {'readonly': True}, + } + + _attribute_map = { + 'primary_connection_string': {'key': 'primaryConnectionString', 'type': 'str'}, + 'secondary_connection_string': {'key': 'secondaryConnectionString', 'type': 'str'}, + 'alias_primary_connection_string': {'key': 'aliasPrimaryConnectionString', 'type': 'str'}, + 'alias_secondary_connection_string': {'key': 'aliasSecondaryConnectionString', 'type': 'str'}, + 'primary_key': {'key': 'primaryKey', 'type': 'str'}, + 'secondary_key': {'key': 'secondaryKey', 'type': 'str'}, + 'key_name': {'key': 'keyName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AccessKeys, self).__init__(**kwargs) + self.primary_connection_string = None + self.secondary_connection_string = None + self.alias_primary_connection_string = None + self.alias_secondary_connection_string = None + self.primary_key = None + self.secondary_key = None + self.key_name = None + + +class Resource(msrest.serialization.Model): + """The resource definition. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Resource ID. + :vartype id: str + :ivar name: Resource name. + :vartype name: str + :ivar type: Resource type. + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Resource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + + +class ArmDisasterRecovery(Resource): + """Single item in List or Get Alias(Disaster Recovery configuration) operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Resource ID. + :vartype id: str + :ivar name: Resource name. + :vartype name: str + :ivar type: Resource type. + :vartype type: str + :ivar system_data: The system meta data relating to this resource. + :vartype system_data: ~azure.mgmt.eventhub.v2021_06_01_preview.models.SystemData + :ivar provisioning_state: Provisioning state of the Alias(Disaster Recovery configuration) - + possible values 'Accepted' or 'Succeeded' or 'Failed'. Possible values include: "Accepted", + "Succeeded", "Failed". 
+ :vartype provisioning_state: str or + ~azure.mgmt.eventhub.v2021_06_01_preview.models.ProvisioningStateDR + :param partner_namespace: ARM Id of the Primary/Secondary eventhub namespace name, which is + part of GEO DR pairing. + :type partner_namespace: str + :param alternate_name: Alternate name specified when alias and namespace names are same. + :type alternate_name: str + :ivar role: role of namespace in GEO DR - possible values 'Primary' or 'PrimaryNotReplicating' + or 'Secondary'. Possible values include: "Primary", "PrimaryNotReplicating", "Secondary". + :vartype role: str or ~azure.mgmt.eventhub.v2021_06_01_preview.models.RoleDisasterRecovery + :ivar pending_replication_operations_count: Number of entities pending to be replicated. + :vartype pending_replication_operations_count: long + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'role': {'readonly': True}, + 'pending_replication_operations_count': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'partner_namespace': {'key': 'properties.partnerNamespace', 'type': 'str'}, + 'alternate_name': {'key': 'properties.alternateName', 'type': 'str'}, + 'role': {'key': 'properties.role', 'type': 'str'}, + 'pending_replication_operations_count': {'key': 'properties.pendingReplicationOperationsCount', 'type': 'long'}, + } + + def __init__( + self, + **kwargs + ): + super(ArmDisasterRecovery, self).__init__(**kwargs) + self.system_data = None + self.provisioning_state = None + self.partner_namespace = kwargs.get('partner_namespace', None) + self.alternate_name = kwargs.get('alternate_name', None) + self.role = None + self.pending_replication_operations_count = None + + +class ArmDisasterRecoveryListResult(msrest.serialization.Model): + """The result of the List Alias(Disaster Recovery configuration) operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param value: List of Alias(Disaster Recovery configurations). + :type value: list[~azure.mgmt.eventhub.v2021_06_01_preview.models.ArmDisasterRecovery] + :ivar next_link: Link to the next set of results. Not empty if Value contains incomplete list + of Alias(Disaster Recovery configuration). + :vartype next_link: str + """ + + _validation = { + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[ArmDisasterRecovery]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ArmDisasterRecoveryListResult, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.next_link = None + + +class AuthorizationRule(Resource): + """Single item in a List or Get AuthorizationRule operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Resource ID. + :vartype id: str + :ivar name: Resource name. + :vartype name: str + :ivar type: Resource type. + :vartype type: str + :ivar system_data: The system meta data relating to this resource. + :vartype system_data: ~azure.mgmt.eventhub.v2021_06_01_preview.models.SystemData + :param rights: The rights associated with the rule. 
+ :type rights: list[str or ~azure.mgmt.eventhub.v2021_06_01_preview.models.AccessRights] + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'rights': {'key': 'properties.rights', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(AuthorizationRule, self).__init__(**kwargs) + self.system_data = None + self.rights = kwargs.get('rights', None) + + +class AuthorizationRuleListResult(msrest.serialization.Model): + """The response from the List namespace operation. + + :param value: Result of the List Authorization Rules operation. + :type value: list[~azure.mgmt.eventhub.v2021_06_01_preview.models.AuthorizationRule] + :param next_link: Link to the next set of results. Not empty if Value contains an incomplete + list of Authorization Rules. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[AuthorizationRule]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AuthorizationRuleListResult, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.next_link = kwargs.get('next_link', None) + + +class AvailableCluster(msrest.serialization.Model): + """Pre-provisioned and readily available Event Hubs Cluster count per region. + + :param location: Location of the Available Cluster. + :type location: str + """ + + _attribute_map = { + 'location': {'key': 'location', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AvailableCluster, self).__init__(**kwargs) + self.location = kwargs.get('location', None) + + +class AvailableClustersList(msrest.serialization.Model): + """The response of the List Available Clusters operation. + + :param value: The count of readily available and pre-provisioned Event Hubs Clusters per + region. + :type value: list[~azure.mgmt.eventhub.v2021_06_01_preview.models.AvailableCluster] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[AvailableCluster]'}, + } + + def __init__( + self, + **kwargs + ): + super(AvailableClustersList, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + + +class CaptureDescription(msrest.serialization.Model): + """Properties to configure capture description for eventhub. + + :param enabled: A value that indicates whether capture description is enabled. + :type enabled: bool + :param encoding: Enumerates the possible values for the encoding format of capture description. + Note: 'AvroDeflate' will be deprecated in New API Version. Possible values include: "Avro", + "AvroDeflate". + :type encoding: str or + ~azure.mgmt.eventhub.v2021_06_01_preview.models.EncodingCaptureDescription + :param interval_in_seconds: The time window allows you to set the frequency with which the + capture to Azure Blobs will happen, value should be between 60 and 900 seconds. + :type interval_in_seconds: int + :param size_limit_in_bytes: The size window defines the amount of data built up in your Event + Hub before a capture operation, value should be between 10485760 and 524288000 bytes. + :type size_limit_in_bytes: int + :param destination: Properties of Destination where capture will be stored. (Storage Account, + Blob Names).
+ :type destination: ~azure.mgmt.eventhub.v2021_06_01_preview.models.Destination + :param skip_empty_archives: A value that indicates whether to Skip Empty Archives. + :type skip_empty_archives: bool + """ + + _attribute_map = { + 'enabled': {'key': 'enabled', 'type': 'bool'}, + 'encoding': {'key': 'encoding', 'type': 'str'}, + 'interval_in_seconds': {'key': 'intervalInSeconds', 'type': 'int'}, + 'size_limit_in_bytes': {'key': 'sizeLimitInBytes', 'type': 'int'}, + 'destination': {'key': 'destination', 'type': 'Destination'}, + 'skip_empty_archives': {'key': 'skipEmptyArchives', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(CaptureDescription, self).__init__(**kwargs) + self.enabled = kwargs.get('enabled', None) + self.encoding = kwargs.get('encoding', None) + self.interval_in_seconds = kwargs.get('interval_in_seconds', None) + self.size_limit_in_bytes = kwargs.get('size_limit_in_bytes', None) + self.destination = kwargs.get('destination', None) + self.skip_empty_archives = kwargs.get('skip_empty_archives', None) + + +class CheckNameAvailabilityParameter(msrest.serialization.Model): + """Parameter supplied to check Namespace name availability operation. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. Name to check the namespace name availability. + :type name: str + """ + + _validation = { + 'name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(CheckNameAvailabilityParameter, self).__init__(**kwargs) + self.name = kwargs['name'] + + +class CheckNameAvailabilityResult(msrest.serialization.Model): + """The Result of the CheckNameAvailability operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar message: The detailed info regarding the reason associated with the Namespace. + :vartype message: str + :param name_available: Value indicating Namespace is availability, true if the Namespace is + available; otherwise, false. + :type name_available: bool + :param reason: The reason for unavailability of a Namespace. Possible values include: "None", + "InvalidName", "SubscriptionIsDisabled", "NameInUse", "NameInLockdown", + "TooManyNamespaceInCurrentSubscription". + :type reason: str or ~azure.mgmt.eventhub.v2021_06_01_preview.models.UnavailableReason + """ + + _validation = { + 'message': {'readonly': True}, + } + + _attribute_map = { + 'message': {'key': 'message', 'type': 'str'}, + 'name_available': {'key': 'nameAvailable', 'type': 'bool'}, + 'reason': {'key': 'reason', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(CheckNameAvailabilityResult, self).__init__(**kwargs) + self.message = None + self.name_available = kwargs.get('name_available', None) + self.reason = kwargs.get('reason', None) + + +class TrackedResource(Resource): + """Definition of resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Resource ID. + :vartype id: str + :ivar name: Resource name. + :vartype name: str + :ivar type: Resource type. + :vartype type: str + :param location: Resource location. + :type location: str + :param tags: A set of tags. Resource tags. 
+ :type tags: dict[str, str] + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__( + self, + **kwargs + ): + super(TrackedResource, self).__init__(**kwargs) + self.location = kwargs.get('location', None) + self.tags = kwargs.get('tags', None) + + +class Cluster(TrackedResource): + """Single Event Hubs Cluster resource in List or Get operations. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Resource ID. + :vartype id: str + :ivar name: Resource name. + :vartype name: str + :ivar type: Resource type. + :vartype type: str + :param location: Resource location. + :type location: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param sku: Properties of the cluster SKU. + :type sku: ~azure.mgmt.eventhub.v2021_06_01_preview.models.ClusterSku + :ivar system_data: The system meta data relating to this resource. + :vartype system_data: ~azure.mgmt.eventhub.v2021_06_01_preview.models.SystemData + :ivar created_at: The UTC time when the Event Hubs Cluster was created. + :vartype created_at: str + :ivar updated_at: The UTC time when the Event Hubs Cluster was last updated. + :vartype updated_at: str + :ivar metric_id: The metric ID of the cluster resource. Provided by the service and not + modifiable by the user. + :vartype metric_id: str + :ivar status: Status of the Cluster resource. + :vartype status: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'created_at': {'readonly': True}, + 'updated_at': {'readonly': True}, + 'metric_id': {'readonly': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'sku': {'key': 'sku', 'type': 'ClusterSku'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'created_at': {'key': 'properties.createdAt', 'type': 'str'}, + 'updated_at': {'key': 'properties.updatedAt', 'type': 'str'}, + 'metric_id': {'key': 'properties.metricId', 'type': 'str'}, + 'status': {'key': 'properties.status', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Cluster, self).__init__(**kwargs) + self.sku = kwargs.get('sku', None) + self.system_data = None + self.created_at = None + self.updated_at = None + self.metric_id = None + self.status = None + + +class ClusterListResult(msrest.serialization.Model): + """The response of the List Event Hubs Clusters operation. + + :param value: The Event Hubs Clusters present in the List Event Hubs operation results. + :type value: list[~azure.mgmt.eventhub.v2021_06_01_preview.models.Cluster] + :param next_link: Link to the next set of results. Empty unless the value parameter contains an + incomplete list of Event Hubs Clusters. 
+ :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Cluster]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ClusterListResult, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.next_link = kwargs.get('next_link', None) + + +class ClusterQuotaConfigurationProperties(msrest.serialization.Model): + """Contains all settings for the cluster. + + :param settings: All possible Cluster settings - a collection of key/value paired settings + which apply to quotas and configurations imposed on the cluster. + :type settings: dict[str, str] + """ + + _attribute_map = { + 'settings': {'key': 'settings', 'type': '{str}'}, + } + + def __init__( + self, + **kwargs + ): + super(ClusterQuotaConfigurationProperties, self).__init__(**kwargs) + self.settings = kwargs.get('settings', None) + + +class ClusterSku(msrest.serialization.Model): + """SKU parameters particular to a cluster instance. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. Name of this SKU. Possible values include: "Dedicated". + :type name: str or ~azure.mgmt.eventhub.v2021_06_01_preview.models.ClusterSkuName + :param capacity: The quantity of Event Hubs Cluster Capacity Units contained in this cluster. + :type capacity: int + """ + + _validation = { + 'name': {'required': True}, + 'capacity': {'minimum': 1}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'capacity': {'key': 'capacity', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(ClusterSku, self).__init__(**kwargs) + self.name = kwargs['name'] + self.capacity = kwargs.get('capacity', None) + + +class ConnectionState(msrest.serialization.Model): + """ConnectionState information. + + :param status: Status of the connection. Possible values include: "Pending", "Approved", + "Rejected", "Disconnected". + :type status: str or + ~azure.mgmt.eventhub.v2021_06_01_preview.models.PrivateLinkConnectionStatus + :param description: Description of the connection state. + :type description: str + """ + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ConnectionState, self).__init__(**kwargs) + self.status = kwargs.get('status', None) + self.description = kwargs.get('description', None) + + +class ConsumerGroup(Resource): + """Single item in List or Get Consumer group operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Resource ID. + :vartype id: str + :ivar name: Resource name. + :vartype name: str + :ivar type: Resource type. + :vartype type: str + :ivar system_data: The system meta data relating to this resource. + :vartype system_data: ~azure.mgmt.eventhub.v2021_06_01_preview.models.SystemData + :ivar created_at: Exact time the message was created. + :vartype created_at: ~datetime.datetime + :ivar updated_at: The exact time the message was updated. + :vartype updated_at: ~datetime.datetime + :param user_metadata: User Metadata is a placeholder to store user-defined string data with + maximum length 1024. e.g. it can be used to store descriptive data, such as list of teams and + their contact information also user-defined configuration settings can be stored. 
+ :type user_metadata: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'created_at': {'readonly': True}, + 'updated_at': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'}, + 'updated_at': {'key': 'properties.updatedAt', 'type': 'iso-8601'}, + 'user_metadata': {'key': 'properties.userMetadata', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ConsumerGroup, self).__init__(**kwargs) + self.system_data = None + self.created_at = None + self.updated_at = None + self.user_metadata = kwargs.get('user_metadata', None) + + +class ConsumerGroupListResult(msrest.serialization.Model): + """The result to the List Consumer Group operation. + + :param value: Result of the List Consumer Group operation. + :type value: list[~azure.mgmt.eventhub.v2021_06_01_preview.models.ConsumerGroup] + :param next_link: Link to the next set of results. Not empty if Value contains incomplete list + of Consumer Group. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[ConsumerGroup]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ConsumerGroupListResult, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.next_link = kwargs.get('next_link', None) + + +class Destination(msrest.serialization.Model): + """Capture storage details for capture description. + + :param name: Name for capture destination. + :type name: str + :param storage_account_resource_id: Resource id of the storage account to be used to create the + blobs. + :type storage_account_resource_id: str + :param blob_container: Blob container Name. + :type blob_container: str + :param archive_name_format: Blob naming convention for archive, e.g. + {Namespace}/{EventHub}/{PartitionId}/{Year}/{Month}/{Day}/{Hour}/{Minute}/{Second}. Here all + the parameters (Namespace,EventHub .. etc) are mandatory irrespective of order. + :type archive_name_format: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'storage_account_resource_id': {'key': 'properties.storageAccountResourceId', 'type': 'str'}, + 'blob_container': {'key': 'properties.blobContainer', 'type': 'str'}, + 'archive_name_format': {'key': 'properties.archiveNameFormat', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Destination, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.storage_account_resource_id = kwargs.get('storage_account_resource_id', None) + self.blob_container = kwargs.get('blob_container', None) + self.archive_name_format = kwargs.get('archive_name_format', None) + + +class EHNamespace(TrackedResource): + """Single Namespace item in List or Get Operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Resource ID. + :vartype id: str + :ivar name: Resource name. + :vartype name: str + :ivar type: Resource type. + :vartype type: str + :param location: Resource location. + :type location: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param sku: Properties of sku resource. 
+ :type sku: ~azure.mgmt.eventhub.v2021_06_01_preview.models.Sku + :param identity: Properties of BYOK Identity description. + :type identity: ~azure.mgmt.eventhub.v2021_06_01_preview.models.Identity + :ivar system_data: The system meta data relating to this resource. + :vartype system_data: ~azure.mgmt.eventhub.v2021_06_01_preview.models.SystemData + :ivar provisioning_state: Provisioning state of the Namespace. + :vartype provisioning_state: str + :ivar status: Status of the Namespace. + :vartype status: str + :ivar created_at: The time the Namespace was created. + :vartype created_at: ~datetime.datetime + :ivar updated_at: The time the Namespace was updated. + :vartype updated_at: ~datetime.datetime + :ivar service_bus_endpoint: Endpoint you can use to perform Service Bus operations. + :vartype service_bus_endpoint: str + :param cluster_arm_id: Cluster ARM ID of the Namespace. + :type cluster_arm_id: str + :ivar metric_id: Identifier for Azure Insights metrics. + :vartype metric_id: str + :param is_auto_inflate_enabled: Value that indicates whether AutoInflate is enabled for + eventhub namespace. + :type is_auto_inflate_enabled: bool + :param maximum_throughput_units: Upper limit of throughput units when AutoInflate is enabled, + value should be within 0 to 20 throughput units. ( '0' if AutoInflateEnabled = true). + :type maximum_throughput_units: int + :param kafka_enabled: Value that indicates whether Kafka is enabled for eventhub namespace. + :type kafka_enabled: bool + :param zone_redundant: Enabling this property creates a Standard Event Hubs Namespace in + regions supported availability zones. + :type zone_redundant: bool + :param encryption: Properties of BYOK Encryption description. + :type encryption: ~azure.mgmt.eventhub.v2021_06_01_preview.models.Encryption + :param private_endpoint_connections: List of private endpoint connections. + :type private_endpoint_connections: + list[~azure.mgmt.eventhub.v2021_06_01_preview.models.PrivateEndpointConnection] + :param disable_local_auth: This property disables SAS authentication for the Event Hubs + namespace. 
+ :type disable_local_auth: bool + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'status': {'readonly': True}, + 'created_at': {'readonly': True}, + 'updated_at': {'readonly': True}, + 'service_bus_endpoint': {'readonly': True}, + 'metric_id': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'identity': {'key': 'identity', 'type': 'Identity'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'status': {'key': 'properties.status', 'type': 'str'}, + 'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'}, + 'updated_at': {'key': 'properties.updatedAt', 'type': 'iso-8601'}, + 'service_bus_endpoint': {'key': 'properties.serviceBusEndpoint', 'type': 'str'}, + 'cluster_arm_id': {'key': 'properties.clusterArmId', 'type': 'str'}, + 'metric_id': {'key': 'properties.metricId', 'type': 'str'}, + 'is_auto_inflate_enabled': {'key': 'properties.isAutoInflateEnabled', 'type': 'bool'}, + 'maximum_throughput_units': {'key': 'properties.maximumThroughputUnits', 'type': 'int'}, + 'kafka_enabled': {'key': 'properties.kafkaEnabled', 'type': 'bool'}, + 'zone_redundant': {'key': 'properties.zoneRedundant', 'type': 'bool'}, + 'encryption': {'key': 'properties.encryption', 'type': 'Encryption'}, + 'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'}, + 'disable_local_auth': {'key': 'properties.disableLocalAuth', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(EHNamespace, self).__init__(**kwargs) + self.sku = kwargs.get('sku', None) + self.identity = kwargs.get('identity', None) + self.system_data = None + self.provisioning_state = None + self.status = None + self.created_at = None + self.updated_at = None + self.service_bus_endpoint = None + self.cluster_arm_id = kwargs.get('cluster_arm_id', None) + self.metric_id = None + self.is_auto_inflate_enabled = kwargs.get('is_auto_inflate_enabled', None) + self.maximum_throughput_units = kwargs.get('maximum_throughput_units', None) + self.kafka_enabled = kwargs.get('kafka_enabled', None) + self.zone_redundant = kwargs.get('zone_redundant', None) + self.encryption = kwargs.get('encryption', None) + self.private_endpoint_connections = kwargs.get('private_endpoint_connections', None) + self.disable_local_auth = kwargs.get('disable_local_auth', None) + + +class EHNamespaceIdContainer(msrest.serialization.Model): + """The full ARM ID of an Event Hubs Namespace. + + :param id: id parameter. + :type id: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(EHNamespaceIdContainer, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + + +class EHNamespaceIdListResult(msrest.serialization.Model): + """The response of the List Namespace IDs operation. + + :param value: Result of the List Namespace IDs operation. 
+ :type value: list[~azure.mgmt.eventhub.v2021_06_01_preview.models.EHNamespaceIdContainer] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[EHNamespaceIdContainer]'}, + } + + def __init__( + self, + **kwargs + ): + super(EHNamespaceIdListResult, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + + +class EHNamespaceListResult(msrest.serialization.Model): + """The response of the List Namespace operation. + + :param value: Result of the List Namespace operation. + :type value: list[~azure.mgmt.eventhub.v2021_06_01_preview.models.EHNamespace] + :param next_link: Link to the next set of results. Not empty if Value contains incomplete list + of namespaces. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[EHNamespace]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(EHNamespaceListResult, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.next_link = kwargs.get('next_link', None) + + +class Encryption(msrest.serialization.Model): + """Properties to configure Encryption. + + :param key_vault_properties: Properties of KeyVault. + :type key_vault_properties: + list[~azure.mgmt.eventhub.v2021_06_01_preview.models.KeyVaultProperties] + :param key_source: Enumerates the possible value of keySource for Encryption. The only + acceptable values to pass in are None and "Microsoft.KeyVault". The default value is + "Microsoft.KeyVault". + :type key_source: str + :param require_infrastructure_encryption: Enable Infrastructure Encryption (Double Encryption). + :type require_infrastructure_encryption: bool + """ + + _attribute_map = { + 'key_vault_properties': {'key': 'keyVaultProperties', 'type': '[KeyVaultProperties]'}, + 'key_source': {'key': 'keySource', 'type': 'str'}, + 'require_infrastructure_encryption': {'key': 'requireInfrastructureEncryption', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(Encryption, self).__init__(**kwargs) + self.key_vault_properties = kwargs.get('key_vault_properties', None) + self.key_source = kwargs.get('key_source', "Microsoft.KeyVault") + self.require_infrastructure_encryption = kwargs.get('require_infrastructure_encryption', None) + + +class ErrorResponse(msrest.serialization.Model): + """Error response indicates Event Hub service is not able to process the incoming request. The reason is provided in the error message. + + :param code: Error code. + :type code: str + :param message: Error message indicating why the operation failed. + :type message: str + """ + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorResponse, self).__init__(**kwargs) + self.code = kwargs.get('code', None) + self.message = kwargs.get('message', None) + + +class Eventhub(Resource): + """Single item in List or Get Event Hub operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Resource ID. + :vartype id: str + :ivar name: Resource name. + :vartype name: str + :ivar type: Resource type. + :vartype type: str + :ivar system_data: The system meta data relating to this resource. + :vartype system_data: ~azure.mgmt.eventhub.v2021_06_01_preview.models.SystemData + :ivar partition_ids: Current number of shards on the Event Hub. + :vartype partition_ids: list[str] + :ivar created_at: Exact time the Event Hub was created. 
+ :vartype created_at: ~datetime.datetime + :ivar updated_at: The exact time the message was updated. + :vartype updated_at: ~datetime.datetime + :param message_retention_in_days: Number of days to retain the events for this Event Hub, value + should be 1 to 7 days. + :type message_retention_in_days: long + :param partition_count: Number of partitions created for the Event Hub, allowed values are from + 1 to 32 partitions. + :type partition_count: long + :param status: Enumerates the possible values for the status of the Event Hub. Possible values + include: "Active", "Disabled", "Restoring", "SendDisabled", "ReceiveDisabled", "Creating", + "Deleting", "Renaming", "Unknown". + :type status: str or ~azure.mgmt.eventhub.v2021_06_01_preview.models.EntityStatus + :param capture_description: Properties of capture description. + :type capture_description: ~azure.mgmt.eventhub.v2021_06_01_preview.models.CaptureDescription + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'partition_ids': {'readonly': True}, + 'created_at': {'readonly': True}, + 'updated_at': {'readonly': True}, + 'message_retention_in_days': {'minimum': 1}, + 'partition_count': {'minimum': 1}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'partition_ids': {'key': 'properties.partitionIds', 'type': '[str]'}, + 'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'}, + 'updated_at': {'key': 'properties.updatedAt', 'type': 'iso-8601'}, + 'message_retention_in_days': {'key': 'properties.messageRetentionInDays', 'type': 'long'}, + 'partition_count': {'key': 'properties.partitionCount', 'type': 'long'}, + 'status': {'key': 'properties.status', 'type': 'str'}, + 'capture_description': {'key': 'properties.captureDescription', 'type': 'CaptureDescription'}, + } + + def __init__( + self, + **kwargs + ): + super(Eventhub, self).__init__(**kwargs) + self.system_data = None + self.partition_ids = None + self.created_at = None + self.updated_at = None + self.message_retention_in_days = kwargs.get('message_retention_in_days', None) + self.partition_count = kwargs.get('partition_count', None) + self.status = kwargs.get('status', None) + self.capture_description = kwargs.get('capture_description', None) + + +class EventHubListResult(msrest.serialization.Model): + """The result of the List EventHubs operation. + + :param value: Result of the List EventHubs operation. + :type value: list[~azure.mgmt.eventhub.v2021_06_01_preview.models.Eventhub] + :param next_link: Link to the next set of results. Not empty if Value contains incomplete list + of EventHubs. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Eventhub]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(EventHubListResult, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.next_link = kwargs.get('next_link', None) + + +class Identity(msrest.serialization.Model): + """Properties to configure Identity for Bring your Own Keys. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar principal_id: ObjectId from the KeyVault. + :vartype principal_id: str + :ivar tenant_id: TenantId from the KeyVault. 
+ :vartype tenant_id: str + :param type: Type of managed service identity. Possible values include: "SystemAssigned", + "UserAssigned", "SystemAssigned, UserAssigned", "None". + :type type: str or ~azure.mgmt.eventhub.v2021_06_01_preview.models.ManagedServiceIdentityType + :param user_assigned_identities: Properties for User Assigned Identities. + :type user_assigned_identities: dict[str, + ~azure.mgmt.eventhub.v2021_06_01_preview.models.UserAssignedIdentity] + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + } + + _attribute_map = { + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentity}'}, + } + + def __init__( + self, + **kwargs + ): + super(Identity, self).__init__(**kwargs) + self.principal_id = None + self.tenant_id = None + self.type = kwargs.get('type', None) + self.user_assigned_identities = kwargs.get('user_assigned_identities', None) + + +class KeyVaultProperties(msrest.serialization.Model): + """Properties to configure keyVault Properties. + + :param key_name: Name of the Key from KeyVault. + :type key_name: str + :param key_vault_uri: Uri of KeyVault. + :type key_vault_uri: str + :param key_version: Key Version. + :type key_version: str + :param identity: + :type identity: ~azure.mgmt.eventhub.v2021_06_01_preview.models.UserAssignedIdentityProperties + """ + + _attribute_map = { + 'key_name': {'key': 'keyName', 'type': 'str'}, + 'key_vault_uri': {'key': 'keyVaultUri', 'type': 'str'}, + 'key_version': {'key': 'keyVersion', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'UserAssignedIdentityProperties'}, + } + + def __init__( + self, + **kwargs + ): + super(KeyVaultProperties, self).__init__(**kwargs) + self.key_name = kwargs.get('key_name', None) + self.key_vault_uri = kwargs.get('key_vault_uri', None) + self.key_version = kwargs.get('key_version', None) + self.identity = kwargs.get('identity', None) + + +class NetworkRuleSet(Resource): + """Description of topic resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Resource ID. + :vartype id: str + :ivar name: Resource name. + :vartype name: str + :ivar type: Resource type. + :vartype type: str + :ivar system_data: The system meta data relating to this resource. + :vartype system_data: ~azure.mgmt.eventhub.v2021_06_01_preview.models.SystemData + :param trusted_service_access_enabled: Value that indicates whether Trusted Service Access is + Enabled or not. + :type trusted_service_access_enabled: bool + :param default_action: Default Action for Network Rule Set. Possible values include: "Allow", + "Deny". + :type default_action: str or ~azure.mgmt.eventhub.v2021_06_01_preview.models.DefaultAction + :param virtual_network_rules: List VirtualNetwork Rules. + :type virtual_network_rules: + list[~azure.mgmt.eventhub.v2021_06_01_preview.models.NWRuleSetVirtualNetworkRules] + :param ip_rules: List of IpRules. + :type ip_rules: list[~azure.mgmt.eventhub.v2021_06_01_preview.models.NWRuleSetIpRules] + :param public_network_access: This determines if traffic is allowed over public network. By + default it is enabled. Possible values include: "Enabled", "Disabled". Default value: + "Enabled". 
+ :type public_network_access: str or + ~azure.mgmt.eventhub.v2021_06_01_preview.models.PublicNetworkAccessFlag + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'trusted_service_access_enabled': {'key': 'properties.trustedServiceAccessEnabled', 'type': 'bool'}, + 'default_action': {'key': 'properties.defaultAction', 'type': 'str'}, + 'virtual_network_rules': {'key': 'properties.virtualNetworkRules', 'type': '[NWRuleSetVirtualNetworkRules]'}, + 'ip_rules': {'key': 'properties.ipRules', 'type': '[NWRuleSetIpRules]'}, + 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(NetworkRuleSet, self).__init__(**kwargs) + self.system_data = None + self.trusted_service_access_enabled = kwargs.get('trusted_service_access_enabled', None) + self.default_action = kwargs.get('default_action', None) + self.virtual_network_rules = kwargs.get('virtual_network_rules', None) + self.ip_rules = kwargs.get('ip_rules', None) + self.public_network_access = kwargs.get('public_network_access', "Enabled") + + +class NWRuleSetIpRules(msrest.serialization.Model): + """The response from the List namespace operation. + + :param ip_mask: IP Mask. + :type ip_mask: str + :param action: The IP Filter Action. Possible values include: "Allow". + :type action: str or ~azure.mgmt.eventhub.v2021_06_01_preview.models.NetworkRuleIPAction + """ + + _attribute_map = { + 'ip_mask': {'key': 'ipMask', 'type': 'str'}, + 'action': {'key': 'action', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(NWRuleSetIpRules, self).__init__(**kwargs) + self.ip_mask = kwargs.get('ip_mask', None) + self.action = kwargs.get('action', None) + + +class NWRuleSetVirtualNetworkRules(msrest.serialization.Model): + """The response from the List namespace operation. + + :param subnet: Subnet properties. + :type subnet: ~azure.mgmt.eventhub.v2021_06_01_preview.models.Subnet + :param ignore_missing_vnet_service_endpoint: Value that indicates whether to ignore missing + Vnet Service Endpoint. + :type ignore_missing_vnet_service_endpoint: bool + """ + + _attribute_map = { + 'subnet': {'key': 'subnet', 'type': 'Subnet'}, + 'ignore_missing_vnet_service_endpoint': {'key': 'ignoreMissingVnetServiceEndpoint', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(NWRuleSetVirtualNetworkRules, self).__init__(**kwargs) + self.subnet = kwargs.get('subnet', None) + self.ignore_missing_vnet_service_endpoint = kwargs.get('ignore_missing_vnet_service_endpoint', None) + + +class Operation(msrest.serialization.Model): + """A Event Hub REST API operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: Operation name: {provider}/{resource}/{operation}. + :vartype name: str + :param display: The object that represents the operation. 
+ :type display: ~azure.mgmt.eventhub.v2021_06_01_preview.models.OperationDisplay + """ + + _validation = { + 'name': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display': {'key': 'display', 'type': 'OperationDisplay'}, + } + + def __init__( + self, + **kwargs + ): + super(Operation, self).__init__(**kwargs) + self.name = None + self.display = kwargs.get('display', None) + + +class OperationDisplay(msrest.serialization.Model): + """The object that represents the operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar provider: Service provider: Microsoft.EventHub. + :vartype provider: str + :ivar resource: Resource on which the operation is performed: Invoice, etc. + :vartype resource: str + :ivar operation: Operation type: Read, write, delete, etc. + :vartype operation: str + """ + + _validation = { + 'provider': {'readonly': True}, + 'resource': {'readonly': True}, + 'operation': {'readonly': True}, + } + + _attribute_map = { + 'provider': {'key': 'provider', 'type': 'str'}, + 'resource': {'key': 'resource', 'type': 'str'}, + 'operation': {'key': 'operation', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(OperationDisplay, self).__init__(**kwargs) + self.provider = None + self.resource = None + self.operation = None + + +class OperationListResult(msrest.serialization.Model): + """Result of the request to list Event Hub operations. It contains a list of operations and a URL link to get the next set of results. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: List of Event Hub operations supported by the Microsoft.EventHub resource + provider. + :vartype value: list[~azure.mgmt.eventhub.v2021_06_01_preview.models.Operation] + :ivar next_link: URL to get the next set of operation list results if there are any. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Operation]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(OperationListResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class PrivateEndpoint(msrest.serialization.Model): + """PrivateEndpoint information. + + :param id: The ARM identifier for Private Endpoint. + :type id: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateEndpoint, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + + +class PrivateEndpointConnection(Resource): + """Properties of the PrivateEndpointConnection. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Resource ID. + :vartype id: str + :ivar name: Resource name. + :vartype name: str + :ivar type: Resource type. + :vartype type: str + :ivar system_data: The system meta data relating to this resource. + :vartype system_data: ~azure.mgmt.eventhub.v2021_06_01_preview.models.SystemData + :param private_endpoint: The Private Endpoint resource for this Connection. + :type private_endpoint: ~azure.mgmt.eventhub.v2021_06_01_preview.models.PrivateEndpoint + :param private_link_service_connection_state: Details about the state of the connection. 
+ :type private_link_service_connection_state: + ~azure.mgmt.eventhub.v2021_06_01_preview.models.ConnectionState + :param provisioning_state: Provisioning state of the Private Endpoint Connection. Possible + values include: "Creating", "Updating", "Deleting", "Succeeded", "Canceled", "Failed". + :type provisioning_state: str or + ~azure.mgmt.eventhub.v2021_06_01_preview.models.EndPointProvisioningState + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'}, + 'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'ConnectionState'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateEndpointConnection, self).__init__(**kwargs) + self.system_data = None + self.private_endpoint = kwargs.get('private_endpoint', None) + self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None) + self.provisioning_state = kwargs.get('provisioning_state', None) + + +class PrivateEndpointConnectionListResult(msrest.serialization.Model): + """Result of the list of all private endpoint connections operation. + + :param value: A collection of private endpoint connection resources. + :type value: list[~azure.mgmt.eventhub.v2021_06_01_preview.models.PrivateEndpointConnection] + :param next_link: A link for the next page of private endpoint connection resources. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PrivateEndpointConnection]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateEndpointConnectionListResult, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.next_link = kwargs.get('next_link', None) + + +class PrivateLinkResource(msrest.serialization.Model): + """Information of the private link resource. + + :param id: Fully qualified identifier of the resource. + :type id: str + :param name: Name of the resource. + :type name: str + :param type: Type of the resource. + :type type: str + :param group_id: The private link resource group id. + :type group_id: str + :param required_members: The private link resource required member names. + :type required_members: list[str] + :param required_zone_names: The private link resource Private link DNS zone name. 
+ :type required_zone_names: list[str] + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'group_id': {'key': 'properties.groupId', 'type': 'str'}, + 'required_members': {'key': 'properties.requiredMembers', 'type': '[str]'}, + 'required_zone_names': {'key': 'properties.requiredZoneNames', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateLinkResource, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.name = kwargs.get('name', None) + self.type = kwargs.get('type', None) + self.group_id = kwargs.get('group_id', None) + self.required_members = kwargs.get('required_members', None) + self.required_zone_names = kwargs.get('required_zone_names', None) + + +class PrivateLinkResourcesListResult(msrest.serialization.Model): + """Result of the List private link resources operation. + + :param value: A collection of private link resources. + :type value: list[~azure.mgmt.eventhub.v2021_06_01_preview.models.PrivateLinkResource] + :param next_link: A link for the next page of private link resources. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PrivateLinkResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateLinkResourcesListResult, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.next_link = kwargs.get('next_link', None) + + +class RegenerateAccessKeyParameters(msrest.serialization.Model): + """Parameters supplied to the Regenerate Authorization Rule operation, specifies which key needs to be reset. + + All required parameters must be populated in order to send to Azure. + + :param key_type: Required. The access key to regenerate. Possible values include: "PrimaryKey", + "SecondaryKey". + :type key_type: str or ~azure.mgmt.eventhub.v2021_06_01_preview.models.KeyType + :param key: Optional, if the key value provided, is set for KeyType or autogenerated Key value + set for keyType. + :type key: str + """ + + _validation = { + 'key_type': {'required': True}, + } + + _attribute_map = { + 'key_type': {'key': 'keyType', 'type': 'str'}, + 'key': {'key': 'key', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(RegenerateAccessKeyParameters, self).__init__(**kwargs) + self.key_type = kwargs['key_type'] + self.key = kwargs.get('key', None) + + +class Sku(msrest.serialization.Model): + """SKU parameters supplied to the create namespace operation. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. Name of this SKU. Possible values include: "Basic", "Standard", + "Premium". + :type name: str or ~azure.mgmt.eventhub.v2021_06_01_preview.models.SkuName + :param tier: The billing tier of this particular SKU. Possible values include: "Basic", + "Standard", "Premium". + :type tier: str or ~azure.mgmt.eventhub.v2021_06_01_preview.models.SkuTier + :param capacity: The Event Hubs throughput units for Basic or Standard tiers, where value + should be 0 to 20 throughput units. The Event Hubs premium units for Premium tier, where value + should be 0 to 10 premium units. 
+ :type capacity: int + """ + + _validation = { + 'name': {'required': True}, + 'capacity': {'minimum': 0}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'tier': {'key': 'tier', 'type': 'str'}, + 'capacity': {'key': 'capacity', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(Sku, self).__init__(**kwargs) + self.name = kwargs['name'] + self.tier = kwargs.get('tier', None) + self.capacity = kwargs.get('capacity', None) + + +class Subnet(msrest.serialization.Model): + """Properties supplied for Subnet. + + :param id: Resource ID of Virtual Network Subnet. + :type id: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Subnet, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + + +class SystemData(msrest.serialization.Model): + """Metadata pertaining to creation and last modification of the resource. + + :param created_by: The identity that created the resource. + :type created_by: str + :param created_by_type: The type of identity that created the resource. Possible values + include: "User", "Application", "ManagedIdentity", "Key". + :type created_by_type: str or ~azure.mgmt.eventhub.v2021_06_01_preview.models.CreatedByType + :param created_at: The timestamp of resource creation (UTC). + :type created_at: ~datetime.datetime + :param last_modified_by: The identity that last modified the resource. + :type last_modified_by: str + :param last_modified_by_type: The type of identity that last modified the resource. Possible + values include: "User", "Application", "ManagedIdentity", "Key". + :type last_modified_by_type: str or + ~azure.mgmt.eventhub.v2021_06_01_preview.models.CreatedByType + :param last_modified_at: The type of identity that last modified the resource. + :type last_modified_at: ~datetime.datetime + """ + + _attribute_map = { + 'created_by': {'key': 'createdBy', 'type': 'str'}, + 'created_by_type': {'key': 'createdByType', 'type': 'str'}, + 'created_at': {'key': 'createdAt', 'type': 'iso-8601'}, + 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'}, + 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'}, + 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'}, + } + + def __init__( + self, + **kwargs + ): + super(SystemData, self).__init__(**kwargs) + self.created_by = kwargs.get('created_by', None) + self.created_by_type = kwargs.get('created_by_type', None) + self.created_at = kwargs.get('created_at', None) + self.last_modified_by = kwargs.get('last_modified_by', None) + self.last_modified_by_type = kwargs.get('last_modified_by_type', None) + self.last_modified_at = kwargs.get('last_modified_at', None) + + +class UserAssignedIdentity(msrest.serialization.Model): + """Recognized Dictionary value. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar principal_id: Principal Id of user assigned identity. + :vartype principal_id: str + :ivar client_id: Client Id of user assigned identity. 
+ :vartype client_id: str + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'client_id': {'readonly': True}, + } + + _attribute_map = { + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'client_id': {'key': 'clientId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(UserAssignedIdentity, self).__init__(**kwargs) + self.principal_id = None + self.client_id = None + + +class UserAssignedIdentityProperties(msrest.serialization.Model): + """UserAssignedIdentityProperties. + + :param user_assigned_identity: ARM ID of user Identity selected for encryption. + :type user_assigned_identity: str + """ + + _attribute_map = { + 'user_assigned_identity': {'key': 'userAssignedIdentity', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(UserAssignedIdentityProperties, self).__init__(**kwargs) + self.user_assigned_identity = kwargs.get('user_assigned_identity', None) diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/models/_models_py3.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/models/_models_py3.py new file mode 100644 index 000000000000..d43fd908426a --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/models/_models_py3.py @@ -0,0 +1,1824 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +import datetime +from typing import Dict, List, Optional, Union + +from azure.core.exceptions import HttpResponseError +import msrest.serialization + +from ._event_hub_management_client_enums import * + + +class AccessKeys(msrest.serialization.Model): + """Namespace/EventHub Connection String. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar primary_connection_string: Primary connection string of the created namespace + AuthorizationRule. + :vartype primary_connection_string: str + :ivar secondary_connection_string: Secondary connection string of the created namespace + AuthorizationRule. + :vartype secondary_connection_string: str + :ivar alias_primary_connection_string: Primary connection string of the alias if GEO DR is + enabled. + :vartype alias_primary_connection_string: str + :ivar alias_secondary_connection_string: Secondary connection string of the alias if GEO DR is + enabled. + :vartype alias_secondary_connection_string: str + :ivar primary_key: A base64-encoded 256-bit primary key for signing and validating the SAS + token. + :vartype primary_key: str + :ivar secondary_key: A base64-encoded 256-bit primary key for signing and validating the SAS + token. + :vartype secondary_key: str + :ivar key_name: A string that describes the AuthorizationRule. 
+ :vartype key_name: str + """ + + _validation = { + 'primary_connection_string': {'readonly': True}, + 'secondary_connection_string': {'readonly': True}, + 'alias_primary_connection_string': {'readonly': True}, + 'alias_secondary_connection_string': {'readonly': True}, + 'primary_key': {'readonly': True}, + 'secondary_key': {'readonly': True}, + 'key_name': {'readonly': True}, + } + + _attribute_map = { + 'primary_connection_string': {'key': 'primaryConnectionString', 'type': 'str'}, + 'secondary_connection_string': {'key': 'secondaryConnectionString', 'type': 'str'}, + 'alias_primary_connection_string': {'key': 'aliasPrimaryConnectionString', 'type': 'str'}, + 'alias_secondary_connection_string': {'key': 'aliasSecondaryConnectionString', 'type': 'str'}, + 'primary_key': {'key': 'primaryKey', 'type': 'str'}, + 'secondary_key': {'key': 'secondaryKey', 'type': 'str'}, + 'key_name': {'key': 'keyName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AccessKeys, self).__init__(**kwargs) + self.primary_connection_string = None + self.secondary_connection_string = None + self.alias_primary_connection_string = None + self.alias_secondary_connection_string = None + self.primary_key = None + self.secondary_key = None + self.key_name = None + + +class Resource(msrest.serialization.Model): + """The resource definition. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Resource ID. + :vartype id: str + :ivar name: Resource name. + :vartype name: str + :ivar type: Resource type. + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Resource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + + +class ArmDisasterRecovery(Resource): + """Single item in List or Get Alias(Disaster Recovery configuration) operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Resource ID. + :vartype id: str + :ivar name: Resource name. + :vartype name: str + :ivar type: Resource type. + :vartype type: str + :ivar system_data: The system meta data relating to this resource. + :vartype system_data: ~azure.mgmt.eventhub.v2021_06_01_preview.models.SystemData + :ivar provisioning_state: Provisioning state of the Alias(Disaster Recovery configuration) - + possible values 'Accepted' or 'Succeeded' or 'Failed'. Possible values include: "Accepted", + "Succeeded", "Failed". + :vartype provisioning_state: str or + ~azure.mgmt.eventhub.v2021_06_01_preview.models.ProvisioningStateDR + :param partner_namespace: ARM Id of the Primary/Secondary eventhub namespace name, which is + part of GEO DR pairing. + :type partner_namespace: str + :param alternate_name: Alternate name specified when alias and namespace names are same. + :type alternate_name: str + :ivar role: role of namespace in GEO DR - possible values 'Primary' or 'PrimaryNotReplicating' + or 'Secondary'. Possible values include: "Primary", "PrimaryNotReplicating", "Secondary". + :vartype role: str or ~azure.mgmt.eventhub.v2021_06_01_preview.models.RoleDisasterRecovery + :ivar pending_replication_operations_count: Number of entities pending to be replicated. 
+ :vartype pending_replication_operations_count: long + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'role': {'readonly': True}, + 'pending_replication_operations_count': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'partner_namespace': {'key': 'properties.partnerNamespace', 'type': 'str'}, + 'alternate_name': {'key': 'properties.alternateName', 'type': 'str'}, + 'role': {'key': 'properties.role', 'type': 'str'}, + 'pending_replication_operations_count': {'key': 'properties.pendingReplicationOperationsCount', 'type': 'long'}, + } + + def __init__( + self, + *, + partner_namespace: Optional[str] = None, + alternate_name: Optional[str] = None, + **kwargs + ): + super(ArmDisasterRecovery, self).__init__(**kwargs) + self.system_data = None + self.provisioning_state = None + self.partner_namespace = partner_namespace + self.alternate_name = alternate_name + self.role = None + self.pending_replication_operations_count = None + + +class ArmDisasterRecoveryListResult(msrest.serialization.Model): + """The result of the List Alias(Disaster Recovery configuration) operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param value: List of Alias(Disaster Recovery configurations). + :type value: list[~azure.mgmt.eventhub.v2021_06_01_preview.models.ArmDisasterRecovery] + :ivar next_link: Link to the next set of results. Not empty if Value contains incomplete list + of Alias(Disaster Recovery configuration). + :vartype next_link: str + """ + + _validation = { + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[ArmDisasterRecovery]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: Optional[List["ArmDisasterRecovery"]] = None, + **kwargs + ): + super(ArmDisasterRecoveryListResult, self).__init__(**kwargs) + self.value = value + self.next_link = None + + +class AuthorizationRule(Resource): + """Single item in a List or Get AuthorizationRule operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Resource ID. + :vartype id: str + :ivar name: Resource name. + :vartype name: str + :ivar type: Resource type. + :vartype type: str + :ivar system_data: The system meta data relating to this resource. + :vartype system_data: ~azure.mgmt.eventhub.v2021_06_01_preview.models.SystemData + :param rights: The rights associated with the rule. 
+ :type rights: list[str or ~azure.mgmt.eventhub.v2021_06_01_preview.models.AccessRights] + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'rights': {'key': 'properties.rights', 'type': '[str]'}, + } + + def __init__( + self, + *, + rights: Optional[List[Union[str, "AccessRights"]]] = None, + **kwargs + ): + super(AuthorizationRule, self).__init__(**kwargs) + self.system_data = None + self.rights = rights + + +class AuthorizationRuleListResult(msrest.serialization.Model): + """The response from the List namespace operation. + + :param value: Result of the List Authorization Rules operation. + :type value: list[~azure.mgmt.eventhub.v2021_06_01_preview.models.AuthorizationRule] + :param next_link: Link to the next set of results. Not empty if Value contains an incomplete + list of Authorization Rules. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[AuthorizationRule]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: Optional[List["AuthorizationRule"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + super(AuthorizationRuleListResult, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class AvailableCluster(msrest.serialization.Model): + """Pre-provisioned and readily available Event Hubs Cluster count per region. + + :param location: Location fo the Available Cluster. + :type location: str + """ + + _attribute_map = { + 'location': {'key': 'location', 'type': 'str'}, + } + + def __init__( + self, + *, + location: Optional[str] = None, + **kwargs + ): + super(AvailableCluster, self).__init__(**kwargs) + self.location = location + + +class AvailableClustersList(msrest.serialization.Model): + """The response of the List Available Clusters operation. + + :param value: The count of readily available and pre-provisioned Event Hubs Clusters per + region. + :type value: list[~azure.mgmt.eventhub.v2021_06_01_preview.models.AvailableCluster] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[AvailableCluster]'}, + } + + def __init__( + self, + *, + value: Optional[List["AvailableCluster"]] = None, + **kwargs + ): + super(AvailableClustersList, self).__init__(**kwargs) + self.value = value + + +class CaptureDescription(msrest.serialization.Model): + """Properties to configure capture description for eventhub. + + :param enabled: A value that indicates whether capture description is enabled. + :type enabled: bool + :param encoding: Enumerates the possible values for the encoding format of capture description. + Note: 'AvroDeflate' will be deprecated in New API Version. Possible values include: "Avro", + "AvroDeflate". + :type encoding: str or + ~azure.mgmt.eventhub.v2021_06_01_preview.models.EncodingCaptureDescription + :param interval_in_seconds: The time window allows you to set the frequency with which the + capture to Azure Blobs will happen, value should between 60 to 900 seconds. + :type interval_in_seconds: int + :param size_limit_in_bytes: The size window defines the amount of data built up in your Event + Hub before an capture operation, value should be between 10485760 to 524288000 bytes. 
+ :type size_limit_in_bytes: int + :param destination: Properties of Destination where capture will be stored. (Storage Account, + Blob Names). + :type destination: ~azure.mgmt.eventhub.v2021_06_01_preview.models.Destination + :param skip_empty_archives: A value that indicates whether to Skip Empty Archives. + :type skip_empty_archives: bool + """ + + _attribute_map = { + 'enabled': {'key': 'enabled', 'type': 'bool'}, + 'encoding': {'key': 'encoding', 'type': 'str'}, + 'interval_in_seconds': {'key': 'intervalInSeconds', 'type': 'int'}, + 'size_limit_in_bytes': {'key': 'sizeLimitInBytes', 'type': 'int'}, + 'destination': {'key': 'destination', 'type': 'Destination'}, + 'skip_empty_archives': {'key': 'skipEmptyArchives', 'type': 'bool'}, + } + + def __init__( + self, + *, + enabled: Optional[bool] = None, + encoding: Optional[Union[str, "EncodingCaptureDescription"]] = None, + interval_in_seconds: Optional[int] = None, + size_limit_in_bytes: Optional[int] = None, + destination: Optional["Destination"] = None, + skip_empty_archives: Optional[bool] = None, + **kwargs + ): + super(CaptureDescription, self).__init__(**kwargs) + self.enabled = enabled + self.encoding = encoding + self.interval_in_seconds = interval_in_seconds + self.size_limit_in_bytes = size_limit_in_bytes + self.destination = destination + self.skip_empty_archives = skip_empty_archives + + +class CheckNameAvailabilityParameter(msrest.serialization.Model): + """Parameter supplied to check Namespace name availability operation. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. Name to check the namespace name availability. + :type name: str + """ + + _validation = { + 'name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__( + self, + *, + name: str, + **kwargs + ): + super(CheckNameAvailabilityParameter, self).__init__(**kwargs) + self.name = name + + +class CheckNameAvailabilityResult(msrest.serialization.Model): + """The Result of the CheckNameAvailability operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar message: The detailed info regarding the reason associated with the Namespace. + :vartype message: str + :param name_available: Value indicating Namespace is availability, true if the Namespace is + available; otherwise, false. + :type name_available: bool + :param reason: The reason for unavailability of a Namespace. Possible values include: "None", + "InvalidName", "SubscriptionIsDisabled", "NameInUse", "NameInLockdown", + "TooManyNamespaceInCurrentSubscription". + :type reason: str or ~azure.mgmt.eventhub.v2021_06_01_preview.models.UnavailableReason + """ + + _validation = { + 'message': {'readonly': True}, + } + + _attribute_map = { + 'message': {'key': 'message', 'type': 'str'}, + 'name_available': {'key': 'nameAvailable', 'type': 'bool'}, + 'reason': {'key': 'reason', 'type': 'str'}, + } + + def __init__( + self, + *, + name_available: Optional[bool] = None, + reason: Optional[Union[str, "UnavailableReason"]] = None, + **kwargs + ): + super(CheckNameAvailabilityResult, self).__init__(**kwargs) + self.message = None + self.name_available = name_available + self.reason = reason + + +class TrackedResource(Resource): + """Definition of resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Resource ID. + :vartype id: str + :ivar name: Resource name. 
+ :vartype name: str + :ivar type: Resource type. + :vartype type: str + :param location: Resource location. + :type location: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__( + self, + *, + location: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + **kwargs + ): + super(TrackedResource, self).__init__(**kwargs) + self.location = location + self.tags = tags + + +class Cluster(TrackedResource): + """Single Event Hubs Cluster resource in List or Get operations. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Resource ID. + :vartype id: str + :ivar name: Resource name. + :vartype name: str + :ivar type: Resource type. + :vartype type: str + :param location: Resource location. + :type location: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param sku: Properties of the cluster SKU. + :type sku: ~azure.mgmt.eventhub.v2021_06_01_preview.models.ClusterSku + :ivar system_data: The system meta data relating to this resource. + :vartype system_data: ~azure.mgmt.eventhub.v2021_06_01_preview.models.SystemData + :ivar created_at: The UTC time when the Event Hubs Cluster was created. + :vartype created_at: str + :ivar updated_at: The UTC time when the Event Hubs Cluster was last updated. + :vartype updated_at: str + :ivar metric_id: The metric ID of the cluster resource. Provided by the service and not + modifiable by the user. + :vartype metric_id: str + :ivar status: Status of the Cluster resource. + :vartype status: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'created_at': {'readonly': True}, + 'updated_at': {'readonly': True}, + 'metric_id': {'readonly': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'sku': {'key': 'sku', 'type': 'ClusterSku'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'created_at': {'key': 'properties.createdAt', 'type': 'str'}, + 'updated_at': {'key': 'properties.updatedAt', 'type': 'str'}, + 'metric_id': {'key': 'properties.metricId', 'type': 'str'}, + 'status': {'key': 'properties.status', 'type': 'str'}, + } + + def __init__( + self, + *, + location: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + sku: Optional["ClusterSku"] = None, + **kwargs + ): + super(Cluster, self).__init__(location=location, tags=tags, **kwargs) + self.sku = sku + self.system_data = None + self.created_at = None + self.updated_at = None + self.metric_id = None + self.status = None + + +class ClusterListResult(msrest.serialization.Model): + """The response of the List Event Hubs Clusters operation. + + :param value: The Event Hubs Clusters present in the List Event Hubs operation results. + :type value: list[~azure.mgmt.eventhub.v2021_06_01_preview.models.Cluster] + :param next_link: Link to the next set of results. 
Empty unless the value parameter contains an + incomplete list of Event Hubs Clusters. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Cluster]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: Optional[List["Cluster"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + super(ClusterListResult, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class ClusterQuotaConfigurationProperties(msrest.serialization.Model): + """Contains all settings for the cluster. + + :param settings: All possible Cluster settings - a collection of key/value paired settings + which apply to quotas and configurations imposed on the cluster. + :type settings: dict[str, str] + """ + + _attribute_map = { + 'settings': {'key': 'settings', 'type': '{str}'}, + } + + def __init__( + self, + *, + settings: Optional[Dict[str, str]] = None, + **kwargs + ): + super(ClusterQuotaConfigurationProperties, self).__init__(**kwargs) + self.settings = settings + + +class ClusterSku(msrest.serialization.Model): + """SKU parameters particular to a cluster instance. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. Name of this SKU. Possible values include: "Dedicated". + :type name: str or ~azure.mgmt.eventhub.v2021_06_01_preview.models.ClusterSkuName + :param capacity: The quantity of Event Hubs Cluster Capacity Units contained in this cluster. + :type capacity: int + """ + + _validation = { + 'name': {'required': True}, + 'capacity': {'minimum': 1}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'capacity': {'key': 'capacity', 'type': 'int'}, + } + + def __init__( + self, + *, + name: Union[str, "ClusterSkuName"], + capacity: Optional[int] = None, + **kwargs + ): + super(ClusterSku, self).__init__(**kwargs) + self.name = name + self.capacity = capacity + + +class ConnectionState(msrest.serialization.Model): + """ConnectionState information. + + :param status: Status of the connection. Possible values include: "Pending", "Approved", + "Rejected", "Disconnected". + :type status: str or + ~azure.mgmt.eventhub.v2021_06_01_preview.models.PrivateLinkConnectionStatus + :param description: Description of the connection state. + :type description: str + """ + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__( + self, + *, + status: Optional[Union[str, "PrivateLinkConnectionStatus"]] = None, + description: Optional[str] = None, + **kwargs + ): + super(ConnectionState, self).__init__(**kwargs) + self.status = status + self.description = description + + +class ConsumerGroup(Resource): + """Single item in List or Get Consumer group operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Resource ID. + :vartype id: str + :ivar name: Resource name. + :vartype name: str + :ivar type: Resource type. + :vartype type: str + :ivar system_data: The system meta data relating to this resource. + :vartype system_data: ~azure.mgmt.eventhub.v2021_06_01_preview.models.SystemData + :ivar created_at: Exact time the message was created. + :vartype created_at: ~datetime.datetime + :ivar updated_at: The exact time the message was updated. + :vartype updated_at: ~datetime.datetime + :param user_metadata: User Metadata is a placeholder to store user-defined string data with + maximum length 1024. e.g. 
it can be used to store descriptive data, such as list of teams and + their contact information also user-defined configuration settings can be stored. + :type user_metadata: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'created_at': {'readonly': True}, + 'updated_at': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'}, + 'updated_at': {'key': 'properties.updatedAt', 'type': 'iso-8601'}, + 'user_metadata': {'key': 'properties.userMetadata', 'type': 'str'}, + } + + def __init__( + self, + *, + user_metadata: Optional[str] = None, + **kwargs + ): + super(ConsumerGroup, self).__init__(**kwargs) + self.system_data = None + self.created_at = None + self.updated_at = None + self.user_metadata = user_metadata + + +class ConsumerGroupListResult(msrest.serialization.Model): + """The result to the List Consumer Group operation. + + :param value: Result of the List Consumer Group operation. + :type value: list[~azure.mgmt.eventhub.v2021_06_01_preview.models.ConsumerGroup] + :param next_link: Link to the next set of results. Not empty if Value contains incomplete list + of Consumer Group. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[ConsumerGroup]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: Optional[List["ConsumerGroup"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + super(ConsumerGroupListResult, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class Destination(msrest.serialization.Model): + """Capture storage details for capture description. + + :param name: Name for capture destination. + :type name: str + :param storage_account_resource_id: Resource id of the storage account to be used to create the + blobs. + :type storage_account_resource_id: str + :param blob_container: Blob container Name. + :type blob_container: str + :param archive_name_format: Blob naming convention for archive, e.g. + {Namespace}/{EventHub}/{PartitionId}/{Year}/{Month}/{Day}/{Hour}/{Minute}/{Second}. Here all + the parameters (Namespace,EventHub .. etc) are mandatory irrespective of order. + :type archive_name_format: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'storage_account_resource_id': {'key': 'properties.storageAccountResourceId', 'type': 'str'}, + 'blob_container': {'key': 'properties.blobContainer', 'type': 'str'}, + 'archive_name_format': {'key': 'properties.archiveNameFormat', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + storage_account_resource_id: Optional[str] = None, + blob_container: Optional[str] = None, + archive_name_format: Optional[str] = None, + **kwargs + ): + super(Destination, self).__init__(**kwargs) + self.name = name + self.storage_account_resource_id = storage_account_resource_id + self.blob_container = blob_container + self.archive_name_format = archive_name_format + + +class EHNamespace(TrackedResource): + """Single Namespace item in List or Get Operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Resource ID. + :vartype id: str + :ivar name: Resource name. 
+    :vartype name: str
+    :ivar type: Resource type.
+    :vartype type: str
+    :param location: Resource location.
+    :type location: str
+    :param tags: A set of tags. Resource tags.
+    :type tags: dict[str, str]
+    :param sku: Properties of SKU resource.
+    :type sku: ~azure.mgmt.eventhub.v2021_06_01_preview.models.Sku
+    :param identity: Properties of BYOK Identity description.
+    :type identity: ~azure.mgmt.eventhub.v2021_06_01_preview.models.Identity
+    :ivar system_data: The system meta data relating to this resource.
+    :vartype system_data: ~azure.mgmt.eventhub.v2021_06_01_preview.models.SystemData
+    :ivar provisioning_state: Provisioning state of the Namespace.
+    :vartype provisioning_state: str
+    :ivar status: Status of the Namespace.
+    :vartype status: str
+    :ivar created_at: The time the Namespace was created.
+    :vartype created_at: ~datetime.datetime
+    :ivar updated_at: The time the Namespace was updated.
+    :vartype updated_at: ~datetime.datetime
+    :ivar service_bus_endpoint: Endpoint you can use to perform Service Bus operations.
+    :vartype service_bus_endpoint: str
+    :param cluster_arm_id: Cluster ARM ID of the Namespace.
+    :type cluster_arm_id: str
+    :ivar metric_id: Identifier for Azure Insights metrics.
+    :vartype metric_id: str
+    :param is_auto_inflate_enabled: Value that indicates whether AutoInflate is enabled for
+     eventhub namespace.
+    :type is_auto_inflate_enabled: bool
+    :param maximum_throughput_units: Upper limit of throughput units when AutoInflate is enabled,
+     value should be within 0 to 20 throughput units. ( '0' if AutoInflateEnabled = true).
+    :type maximum_throughput_units: int
+    :param kafka_enabled: Value that indicates whether Kafka is enabled for eventhub namespace.
+    :type kafka_enabled: bool
+    :param zone_redundant: Enabling this property creates a Standard Event Hubs Namespace in
+     regions that support availability zones.
+    :type zone_redundant: bool
+    :param encryption: Properties of BYOK Encryption description.
+    :type encryption: ~azure.mgmt.eventhub.v2021_06_01_preview.models.Encryption
+    :param private_endpoint_connections: List of private endpoint connections.
+    :type private_endpoint_connections:
+     list[~azure.mgmt.eventhub.v2021_06_01_preview.models.PrivateEndpointConnection]
+    :param disable_local_auth: This property disables SAS authentication for the Event Hubs
+     namespace.
+ :type disable_local_auth: bool + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'status': {'readonly': True}, + 'created_at': {'readonly': True}, + 'updated_at': {'readonly': True}, + 'service_bus_endpoint': {'readonly': True}, + 'metric_id': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'identity': {'key': 'identity', 'type': 'Identity'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'status': {'key': 'properties.status', 'type': 'str'}, + 'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'}, + 'updated_at': {'key': 'properties.updatedAt', 'type': 'iso-8601'}, + 'service_bus_endpoint': {'key': 'properties.serviceBusEndpoint', 'type': 'str'}, + 'cluster_arm_id': {'key': 'properties.clusterArmId', 'type': 'str'}, + 'metric_id': {'key': 'properties.metricId', 'type': 'str'}, + 'is_auto_inflate_enabled': {'key': 'properties.isAutoInflateEnabled', 'type': 'bool'}, + 'maximum_throughput_units': {'key': 'properties.maximumThroughputUnits', 'type': 'int'}, + 'kafka_enabled': {'key': 'properties.kafkaEnabled', 'type': 'bool'}, + 'zone_redundant': {'key': 'properties.zoneRedundant', 'type': 'bool'}, + 'encryption': {'key': 'properties.encryption', 'type': 'Encryption'}, + 'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'}, + 'disable_local_auth': {'key': 'properties.disableLocalAuth', 'type': 'bool'}, + } + + def __init__( + self, + *, + location: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + sku: Optional["Sku"] = None, + identity: Optional["Identity"] = None, + cluster_arm_id: Optional[str] = None, + is_auto_inflate_enabled: Optional[bool] = None, + maximum_throughput_units: Optional[int] = None, + kafka_enabled: Optional[bool] = None, + zone_redundant: Optional[bool] = None, + encryption: Optional["Encryption"] = None, + private_endpoint_connections: Optional[List["PrivateEndpointConnection"]] = None, + disable_local_auth: Optional[bool] = None, + **kwargs + ): + super(EHNamespace, self).__init__(location=location, tags=tags, **kwargs) + self.sku = sku + self.identity = identity + self.system_data = None + self.provisioning_state = None + self.status = None + self.created_at = None + self.updated_at = None + self.service_bus_endpoint = None + self.cluster_arm_id = cluster_arm_id + self.metric_id = None + self.is_auto_inflate_enabled = is_auto_inflate_enabled + self.maximum_throughput_units = maximum_throughput_units + self.kafka_enabled = kafka_enabled + self.zone_redundant = zone_redundant + self.encryption = encryption + self.private_endpoint_connections = private_endpoint_connections + self.disable_local_auth = disable_local_auth + + +class EHNamespaceIdContainer(msrest.serialization.Model): + """The full ARM ID of an Event Hubs Namespace. + + :param id: id parameter. 
+ :type id: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + } + + def __init__( + self, + *, + id: Optional[str] = None, + **kwargs + ): + super(EHNamespaceIdContainer, self).__init__(**kwargs) + self.id = id + + +class EHNamespaceIdListResult(msrest.serialization.Model): + """The response of the List Namespace IDs operation. + + :param value: Result of the List Namespace IDs operation. + :type value: list[~azure.mgmt.eventhub.v2021_06_01_preview.models.EHNamespaceIdContainer] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[EHNamespaceIdContainer]'}, + } + + def __init__( + self, + *, + value: Optional[List["EHNamespaceIdContainer"]] = None, + **kwargs + ): + super(EHNamespaceIdListResult, self).__init__(**kwargs) + self.value = value + + +class EHNamespaceListResult(msrest.serialization.Model): + """The response of the List Namespace operation. + + :param value: Result of the List Namespace operation. + :type value: list[~azure.mgmt.eventhub.v2021_06_01_preview.models.EHNamespace] + :param next_link: Link to the next set of results. Not empty if Value contains incomplete list + of namespaces. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[EHNamespace]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: Optional[List["EHNamespace"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + super(EHNamespaceListResult, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class Encryption(msrest.serialization.Model): + """Properties to configure Encryption. + + :param key_vault_properties: Properties of KeyVault. + :type key_vault_properties: + list[~azure.mgmt.eventhub.v2021_06_01_preview.models.KeyVaultProperties] + :param key_source: Enumerates the possible value of keySource for Encryption. The only + acceptable values to pass in are None and "Microsoft.KeyVault". The default value is + "Microsoft.KeyVault". + :type key_source: str + :param require_infrastructure_encryption: Enable Infrastructure Encryption (Double Encryption). + :type require_infrastructure_encryption: bool + """ + + _attribute_map = { + 'key_vault_properties': {'key': 'keyVaultProperties', 'type': '[KeyVaultProperties]'}, + 'key_source': {'key': 'keySource', 'type': 'str'}, + 'require_infrastructure_encryption': {'key': 'requireInfrastructureEncryption', 'type': 'bool'}, + } + + def __init__( + self, + *, + key_vault_properties: Optional[List["KeyVaultProperties"]] = None, + key_source: Optional[str] = "Microsoft.KeyVault", + require_infrastructure_encryption: Optional[bool] = None, + **kwargs + ): + super(Encryption, self).__init__(**kwargs) + self.key_vault_properties = key_vault_properties + self.key_source = key_source + self.require_infrastructure_encryption = require_infrastructure_encryption + + +class ErrorResponse(msrest.serialization.Model): + """Error response indicates Event Hub service is not able to process the incoming request. The reason is provided in the error message. + + :param code: Error code. + :type code: str + :param message: Error message indicating why the operation failed. 
+ :type message: str + """ + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__( + self, + *, + code: Optional[str] = None, + message: Optional[str] = None, + **kwargs + ): + super(ErrorResponse, self).__init__(**kwargs) + self.code = code + self.message = message + + +class Eventhub(Resource): + """Single item in List or Get Event Hub operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Resource ID. + :vartype id: str + :ivar name: Resource name. + :vartype name: str + :ivar type: Resource type. + :vartype type: str + :ivar system_data: The system meta data relating to this resource. + :vartype system_data: ~azure.mgmt.eventhub.v2021_06_01_preview.models.SystemData + :ivar partition_ids: Current number of shards on the Event Hub. + :vartype partition_ids: list[str] + :ivar created_at: Exact time the Event Hub was created. + :vartype created_at: ~datetime.datetime + :ivar updated_at: The exact time the message was updated. + :vartype updated_at: ~datetime.datetime + :param message_retention_in_days: Number of days to retain the events for this Event Hub, value + should be 1 to 7 days. + :type message_retention_in_days: long + :param partition_count: Number of partitions created for the Event Hub, allowed values are from + 1 to 32 partitions. + :type partition_count: long + :param status: Enumerates the possible values for the status of the Event Hub. Possible values + include: "Active", "Disabled", "Restoring", "SendDisabled", "ReceiveDisabled", "Creating", + "Deleting", "Renaming", "Unknown". + :type status: str or ~azure.mgmt.eventhub.v2021_06_01_preview.models.EntityStatus + :param capture_description: Properties of capture description. 
+ :type capture_description: ~azure.mgmt.eventhub.v2021_06_01_preview.models.CaptureDescription + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'partition_ids': {'readonly': True}, + 'created_at': {'readonly': True}, + 'updated_at': {'readonly': True}, + 'message_retention_in_days': {'minimum': 1}, + 'partition_count': {'minimum': 1}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'partition_ids': {'key': 'properties.partitionIds', 'type': '[str]'}, + 'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'}, + 'updated_at': {'key': 'properties.updatedAt', 'type': 'iso-8601'}, + 'message_retention_in_days': {'key': 'properties.messageRetentionInDays', 'type': 'long'}, + 'partition_count': {'key': 'properties.partitionCount', 'type': 'long'}, + 'status': {'key': 'properties.status', 'type': 'str'}, + 'capture_description': {'key': 'properties.captureDescription', 'type': 'CaptureDescription'}, + } + + def __init__( + self, + *, + message_retention_in_days: Optional[int] = None, + partition_count: Optional[int] = None, + status: Optional[Union[str, "EntityStatus"]] = None, + capture_description: Optional["CaptureDescription"] = None, + **kwargs + ): + super(Eventhub, self).__init__(**kwargs) + self.system_data = None + self.partition_ids = None + self.created_at = None + self.updated_at = None + self.message_retention_in_days = message_retention_in_days + self.partition_count = partition_count + self.status = status + self.capture_description = capture_description + + +class EventHubListResult(msrest.serialization.Model): + """The result of the List EventHubs operation. + + :param value: Result of the List EventHubs operation. + :type value: list[~azure.mgmt.eventhub.v2021_06_01_preview.models.Eventhub] + :param next_link: Link to the next set of results. Not empty if Value contains incomplete list + of EventHubs. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Eventhub]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: Optional[List["Eventhub"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + super(EventHubListResult, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class Identity(msrest.serialization.Model): + """Properties to configure Identity for Bring your Own Keys. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar principal_id: ObjectId from the KeyVault. + :vartype principal_id: str + :ivar tenant_id: TenantId from the KeyVault. + :vartype tenant_id: str + :param type: Type of managed service identity. Possible values include: "SystemAssigned", + "UserAssigned", "SystemAssigned, UserAssigned", "None". + :type type: str or ~azure.mgmt.eventhub.v2021_06_01_preview.models.ManagedServiceIdentityType + :param user_assigned_identities: Properties for User Assigned Identities. 
+ :type user_assigned_identities: dict[str, + ~azure.mgmt.eventhub.v2021_06_01_preview.models.UserAssignedIdentity] + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + } + + _attribute_map = { + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentity}'}, + } + + def __init__( + self, + *, + type: Optional[Union[str, "ManagedServiceIdentityType"]] = None, + user_assigned_identities: Optional[Dict[str, "UserAssignedIdentity"]] = None, + **kwargs + ): + super(Identity, self).__init__(**kwargs) + self.principal_id = None + self.tenant_id = None + self.type = type + self.user_assigned_identities = user_assigned_identities + + +class KeyVaultProperties(msrest.serialization.Model): + """Properties to configure keyVault Properties. + + :param key_name: Name of the Key from KeyVault. + :type key_name: str + :param key_vault_uri: Uri of KeyVault. + :type key_vault_uri: str + :param key_version: Key Version. + :type key_version: str + :param identity: + :type identity: ~azure.mgmt.eventhub.v2021_06_01_preview.models.UserAssignedIdentityProperties + """ + + _attribute_map = { + 'key_name': {'key': 'keyName', 'type': 'str'}, + 'key_vault_uri': {'key': 'keyVaultUri', 'type': 'str'}, + 'key_version': {'key': 'keyVersion', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'UserAssignedIdentityProperties'}, + } + + def __init__( + self, + *, + key_name: Optional[str] = None, + key_vault_uri: Optional[str] = None, + key_version: Optional[str] = None, + identity: Optional["UserAssignedIdentityProperties"] = None, + **kwargs + ): + super(KeyVaultProperties, self).__init__(**kwargs) + self.key_name = key_name + self.key_vault_uri = key_vault_uri + self.key_version = key_version + self.identity = identity + + +class NetworkRuleSet(Resource): + """Description of topic resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Resource ID. + :vartype id: str + :ivar name: Resource name. + :vartype name: str + :ivar type: Resource type. + :vartype type: str + :ivar system_data: The system meta data relating to this resource. + :vartype system_data: ~azure.mgmt.eventhub.v2021_06_01_preview.models.SystemData + :param trusted_service_access_enabled: Value that indicates whether Trusted Service Access is + Enabled or not. + :type trusted_service_access_enabled: bool + :param default_action: Default Action for Network Rule Set. Possible values include: "Allow", + "Deny". + :type default_action: str or ~azure.mgmt.eventhub.v2021_06_01_preview.models.DefaultAction + :param virtual_network_rules: List VirtualNetwork Rules. + :type virtual_network_rules: + list[~azure.mgmt.eventhub.v2021_06_01_preview.models.NWRuleSetVirtualNetworkRules] + :param ip_rules: List of IpRules. + :type ip_rules: list[~azure.mgmt.eventhub.v2021_06_01_preview.models.NWRuleSetIpRules] + :param public_network_access: This determines if traffic is allowed over public network. By + default it is enabled. Possible values include: "Enabled", "Disabled". Default value: + "Enabled". 
+ :type public_network_access: str or + ~azure.mgmt.eventhub.v2021_06_01_preview.models.PublicNetworkAccessFlag + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'trusted_service_access_enabled': {'key': 'properties.trustedServiceAccessEnabled', 'type': 'bool'}, + 'default_action': {'key': 'properties.defaultAction', 'type': 'str'}, + 'virtual_network_rules': {'key': 'properties.virtualNetworkRules', 'type': '[NWRuleSetVirtualNetworkRules]'}, + 'ip_rules': {'key': 'properties.ipRules', 'type': '[NWRuleSetIpRules]'}, + 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, + } + + def __init__( + self, + *, + trusted_service_access_enabled: Optional[bool] = None, + default_action: Optional[Union[str, "DefaultAction"]] = None, + virtual_network_rules: Optional[List["NWRuleSetVirtualNetworkRules"]] = None, + ip_rules: Optional[List["NWRuleSetIpRules"]] = None, + public_network_access: Optional[Union[str, "PublicNetworkAccessFlag"]] = "Enabled", + **kwargs + ): + super(NetworkRuleSet, self).__init__(**kwargs) + self.system_data = None + self.trusted_service_access_enabled = trusted_service_access_enabled + self.default_action = default_action + self.virtual_network_rules = virtual_network_rules + self.ip_rules = ip_rules + self.public_network_access = public_network_access + + +class NWRuleSetIpRules(msrest.serialization.Model): + """The response from the List namespace operation. + + :param ip_mask: IP Mask. + :type ip_mask: str + :param action: The IP Filter Action. Possible values include: "Allow". + :type action: str or ~azure.mgmt.eventhub.v2021_06_01_preview.models.NetworkRuleIPAction + """ + + _attribute_map = { + 'ip_mask': {'key': 'ipMask', 'type': 'str'}, + 'action': {'key': 'action', 'type': 'str'}, + } + + def __init__( + self, + *, + ip_mask: Optional[str] = None, + action: Optional[Union[str, "NetworkRuleIPAction"]] = None, + **kwargs + ): + super(NWRuleSetIpRules, self).__init__(**kwargs) + self.ip_mask = ip_mask + self.action = action + + +class NWRuleSetVirtualNetworkRules(msrest.serialization.Model): + """The response from the List namespace operation. + + :param subnet: Subnet properties. + :type subnet: ~azure.mgmt.eventhub.v2021_06_01_preview.models.Subnet + :param ignore_missing_vnet_service_endpoint: Value that indicates whether to ignore missing + Vnet Service Endpoint. + :type ignore_missing_vnet_service_endpoint: bool + """ + + _attribute_map = { + 'subnet': {'key': 'subnet', 'type': 'Subnet'}, + 'ignore_missing_vnet_service_endpoint': {'key': 'ignoreMissingVnetServiceEndpoint', 'type': 'bool'}, + } + + def __init__( + self, + *, + subnet: Optional["Subnet"] = None, + ignore_missing_vnet_service_endpoint: Optional[bool] = None, + **kwargs + ): + super(NWRuleSetVirtualNetworkRules, self).__init__(**kwargs) + self.subnet = subnet + self.ignore_missing_vnet_service_endpoint = ignore_missing_vnet_service_endpoint + + +class Operation(msrest.serialization.Model): + """A Event Hub REST API operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: Operation name: {provider}/{resource}/{operation}. + :vartype name: str + :param display: The object that represents the operation. 
+ :type display: ~azure.mgmt.eventhub.v2021_06_01_preview.models.OperationDisplay + """ + + _validation = { + 'name': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display': {'key': 'display', 'type': 'OperationDisplay'}, + } + + def __init__( + self, + *, + display: Optional["OperationDisplay"] = None, + **kwargs + ): + super(Operation, self).__init__(**kwargs) + self.name = None + self.display = display + + +class OperationDisplay(msrest.serialization.Model): + """The object that represents the operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar provider: Service provider: Microsoft.EventHub. + :vartype provider: str + :ivar resource: Resource on which the operation is performed: Invoice, etc. + :vartype resource: str + :ivar operation: Operation type: Read, write, delete, etc. + :vartype operation: str + """ + + _validation = { + 'provider': {'readonly': True}, + 'resource': {'readonly': True}, + 'operation': {'readonly': True}, + } + + _attribute_map = { + 'provider': {'key': 'provider', 'type': 'str'}, + 'resource': {'key': 'resource', 'type': 'str'}, + 'operation': {'key': 'operation', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(OperationDisplay, self).__init__(**kwargs) + self.provider = None + self.resource = None + self.operation = None + + +class OperationListResult(msrest.serialization.Model): + """Result of the request to list Event Hub operations. It contains a list of operations and a URL link to get the next set of results. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: List of Event Hub operations supported by the Microsoft.EventHub resource + provider. + :vartype value: list[~azure.mgmt.eventhub.v2021_06_01_preview.models.Operation] + :ivar next_link: URL to get the next set of operation list results if there are any. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Operation]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(OperationListResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class PrivateEndpoint(msrest.serialization.Model): + """PrivateEndpoint information. + + :param id: The ARM identifier for Private Endpoint. + :type id: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + } + + def __init__( + self, + *, + id: Optional[str] = None, + **kwargs + ): + super(PrivateEndpoint, self).__init__(**kwargs) + self.id = id + + +class PrivateEndpointConnection(Resource): + """Properties of the PrivateEndpointConnection. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Resource ID. + :vartype id: str + :ivar name: Resource name. + :vartype name: str + :ivar type: Resource type. + :vartype type: str + :ivar system_data: The system meta data relating to this resource. + :vartype system_data: ~azure.mgmt.eventhub.v2021_06_01_preview.models.SystemData + :param private_endpoint: The Private Endpoint resource for this Connection. + :type private_endpoint: ~azure.mgmt.eventhub.v2021_06_01_preview.models.PrivateEndpoint + :param private_link_service_connection_state: Details about the state of the connection. 
+ :type private_link_service_connection_state: + ~azure.mgmt.eventhub.v2021_06_01_preview.models.ConnectionState + :param provisioning_state: Provisioning state of the Private Endpoint Connection. Possible + values include: "Creating", "Updating", "Deleting", "Succeeded", "Canceled", "Failed". + :type provisioning_state: str or + ~azure.mgmt.eventhub.v2021_06_01_preview.models.EndPointProvisioningState + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'}, + 'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'ConnectionState'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + } + + def __init__( + self, + *, + private_endpoint: Optional["PrivateEndpoint"] = None, + private_link_service_connection_state: Optional["ConnectionState"] = None, + provisioning_state: Optional[Union[str, "EndPointProvisioningState"]] = None, + **kwargs + ): + super(PrivateEndpointConnection, self).__init__(**kwargs) + self.system_data = None + self.private_endpoint = private_endpoint + self.private_link_service_connection_state = private_link_service_connection_state + self.provisioning_state = provisioning_state + + +class PrivateEndpointConnectionListResult(msrest.serialization.Model): + """Result of the list of all private endpoint connections operation. + + :param value: A collection of private endpoint connection resources. + :type value: list[~azure.mgmt.eventhub.v2021_06_01_preview.models.PrivateEndpointConnection] + :param next_link: A link for the next page of private endpoint connection resources. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PrivateEndpointConnection]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: Optional[List["PrivateEndpointConnection"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + super(PrivateEndpointConnectionListResult, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class PrivateLinkResource(msrest.serialization.Model): + """Information of the private link resource. + + :param id: Fully qualified identifier of the resource. + :type id: str + :param name: Name of the resource. + :type name: str + :param type: Type of the resource. + :type type: str + :param group_id: The private link resource group id. + :type group_id: str + :param required_members: The private link resource required member names. + :type required_members: list[str] + :param required_zone_names: The private link resource Private link DNS zone name. 
+ :type required_zone_names: list[str] + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'group_id': {'key': 'properties.groupId', 'type': 'str'}, + 'required_members': {'key': 'properties.requiredMembers', 'type': '[str]'}, + 'required_zone_names': {'key': 'properties.requiredZoneNames', 'type': '[str]'}, + } + + def __init__( + self, + *, + id: Optional[str] = None, + name: Optional[str] = None, + type: Optional[str] = None, + group_id: Optional[str] = None, + required_members: Optional[List[str]] = None, + required_zone_names: Optional[List[str]] = None, + **kwargs + ): + super(PrivateLinkResource, self).__init__(**kwargs) + self.id = id + self.name = name + self.type = type + self.group_id = group_id + self.required_members = required_members + self.required_zone_names = required_zone_names + + +class PrivateLinkResourcesListResult(msrest.serialization.Model): + """Result of the List private link resources operation. + + :param value: A collection of private link resources. + :type value: list[~azure.mgmt.eventhub.v2021_06_01_preview.models.PrivateLinkResource] + :param next_link: A link for the next page of private link resources. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PrivateLinkResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: Optional[List["PrivateLinkResource"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + super(PrivateLinkResourcesListResult, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class RegenerateAccessKeyParameters(msrest.serialization.Model): + """Parameters supplied to the Regenerate Authorization Rule operation, specifies which key needs to be reset. + + All required parameters must be populated in order to send to Azure. + + :param key_type: Required. The access key to regenerate. Possible values include: "PrimaryKey", + "SecondaryKey". + :type key_type: str or ~azure.mgmt.eventhub.v2021_06_01_preview.models.KeyType + :param key: Optional, if the key value provided, is set for KeyType or autogenerated Key value + set for keyType. + :type key: str + """ + + _validation = { + 'key_type': {'required': True}, + } + + _attribute_map = { + 'key_type': {'key': 'keyType', 'type': 'str'}, + 'key': {'key': 'key', 'type': 'str'}, + } + + def __init__( + self, + *, + key_type: Union[str, "KeyType"], + key: Optional[str] = None, + **kwargs + ): + super(RegenerateAccessKeyParameters, self).__init__(**kwargs) + self.key_type = key_type + self.key = key + + +class Sku(msrest.serialization.Model): + """SKU parameters supplied to the create namespace operation. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. Name of this SKU. Possible values include: "Basic", "Standard", + "Premium". + :type name: str or ~azure.mgmt.eventhub.v2021_06_01_preview.models.SkuName + :param tier: The billing tier of this particular SKU. Possible values include: "Basic", + "Standard", "Premium". + :type tier: str or ~azure.mgmt.eventhub.v2021_06_01_preview.models.SkuTier + :param capacity: The Event Hubs throughput units for Basic or Standard tiers, where value + should be 0 to 20 throughput units. The Event Hubs premium units for Premium tier, where value + should be 0 to 10 premium units. 
+ :type capacity: int + """ + + _validation = { + 'name': {'required': True}, + 'capacity': {'minimum': 0}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'tier': {'key': 'tier', 'type': 'str'}, + 'capacity': {'key': 'capacity', 'type': 'int'}, + } + + def __init__( + self, + *, + name: Union[str, "SkuName"], + tier: Optional[Union[str, "SkuTier"]] = None, + capacity: Optional[int] = None, + **kwargs + ): + super(Sku, self).__init__(**kwargs) + self.name = name + self.tier = tier + self.capacity = capacity + + +class Subnet(msrest.serialization.Model): + """Properties supplied for Subnet. + + :param id: Resource ID of Virtual Network Subnet. + :type id: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + } + + def __init__( + self, + *, + id: Optional[str] = None, + **kwargs + ): + super(Subnet, self).__init__(**kwargs) + self.id = id + + +class SystemData(msrest.serialization.Model): + """Metadata pertaining to creation and last modification of the resource. + + :param created_by: The identity that created the resource. + :type created_by: str + :param created_by_type: The type of identity that created the resource. Possible values + include: "User", "Application", "ManagedIdentity", "Key". + :type created_by_type: str or ~azure.mgmt.eventhub.v2021_06_01_preview.models.CreatedByType + :param created_at: The timestamp of resource creation (UTC). + :type created_at: ~datetime.datetime + :param last_modified_by: The identity that last modified the resource. + :type last_modified_by: str + :param last_modified_by_type: The type of identity that last modified the resource. Possible + values include: "User", "Application", "ManagedIdentity", "Key". + :type last_modified_by_type: str or + ~azure.mgmt.eventhub.v2021_06_01_preview.models.CreatedByType + :param last_modified_at: The type of identity that last modified the resource. + :type last_modified_at: ~datetime.datetime + """ + + _attribute_map = { + 'created_by': {'key': 'createdBy', 'type': 'str'}, + 'created_by_type': {'key': 'createdByType', 'type': 'str'}, + 'created_at': {'key': 'createdAt', 'type': 'iso-8601'}, + 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'}, + 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'}, + 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'}, + } + + def __init__( + self, + *, + created_by: Optional[str] = None, + created_by_type: Optional[Union[str, "CreatedByType"]] = None, + created_at: Optional[datetime.datetime] = None, + last_modified_by: Optional[str] = None, + last_modified_by_type: Optional[Union[str, "CreatedByType"]] = None, + last_modified_at: Optional[datetime.datetime] = None, + **kwargs + ): + super(SystemData, self).__init__(**kwargs) + self.created_by = created_by + self.created_by_type = created_by_type + self.created_at = created_at + self.last_modified_by = last_modified_by + self.last_modified_by_type = last_modified_by_type + self.last_modified_at = last_modified_at + + +class UserAssignedIdentity(msrest.serialization.Model): + """Recognized Dictionary value. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar principal_id: Principal Id of user assigned identity. + :vartype principal_id: str + :ivar client_id: Client Id of user assigned identity. 
+ :vartype client_id: str + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'client_id': {'readonly': True}, + } + + _attribute_map = { + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'client_id': {'key': 'clientId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(UserAssignedIdentity, self).__init__(**kwargs) + self.principal_id = None + self.client_id = None + + +class UserAssignedIdentityProperties(msrest.serialization.Model): + """UserAssignedIdentityProperties. + + :param user_assigned_identity: ARM ID of user Identity selected for encryption. + :type user_assigned_identity: str + """ + + _attribute_map = { + 'user_assigned_identity': {'key': 'userAssignedIdentity', 'type': 'str'}, + } + + def __init__( + self, + *, + user_assigned_identity: Optional[str] = None, + **kwargs + ): + super(UserAssignedIdentityProperties, self).__init__(**kwargs) + self.user_assigned_identity = user_assigned_identity diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/operations/__init__.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/operations/__init__.py new file mode 100644 index 000000000000..11daeaa7c220 --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/operations/__init__.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._clusters_operations import ClustersOperations +from ._configuration_operations import ConfigurationOperations +from ._namespaces_operations import NamespacesOperations +from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations +from ._private_link_resources_operations import PrivateLinkResourcesOperations +from ._operations import Operations +from ._event_hubs_operations import EventHubsOperations +from ._disaster_recovery_configs_operations import DisasterRecoveryConfigsOperations +from ._consumer_groups_operations import ConsumerGroupsOperations + +__all__ = [ + 'ClustersOperations', + 'ConfigurationOperations', + 'NamespacesOperations', + 'PrivateEndpointConnectionsOperations', + 'PrivateLinkResourcesOperations', + 'Operations', + 'EventHubsOperations', + 'DisasterRecoveryConfigsOperations', + 'ConsumerGroupsOperations', +] diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/operations/_clusters_operations.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/operations/_clusters_operations.py new file mode 100644 index 000000000000..b05ead0fc9bf --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/operations/_clusters_operations.py @@ -0,0 +1,735 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class ClustersOperations(object): + """ClustersOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.eventhub.v2021_06_01_preview.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_available_cluster_region( + self, + **kwargs # type: Any + ): + # type: (...) -> "_models.AvailableClustersList" + """List the quantity of available pre-provisioned Event Hubs Clusters, indexed by Azure region. 
+
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: AvailableClustersList, or the result of cls(response)
+        :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.AvailableClustersList
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.AvailableClustersList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-06-01-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.list_available_cluster_region.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('AvailableClustersList', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    list_available_cluster_region.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.EventHub/availableClusterRegions'}  # type: ignore
+
+    def list_by_subscription(
+        self,
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.ClusterListResult"]
+        """Lists the available Event Hubs Clusters within an Azure subscription.
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ClusterListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.eventhub.v2021_06_01_preview.models.ClusterListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ClusterListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_subscription.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('ClusterListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.EventHub/clusters'} # type: ignore + + def list_by_resource_group( + self, + resource_group_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.ClusterListResult"] + """Lists the available Event Hubs Clusters within an ARM resource group. + + :param resource_group_name: Name of the resource group within the azure subscription. 
+ :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ClusterListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.eventhub.v2021_06_01_preview.models.ClusterListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ClusterListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_resource_group.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('ClusterListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/clusters'} # type: ignore + + def get( + self, + resource_group_name, # type: str + cluster_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.Cluster" + """Gets the resource description of the specified Event Hubs Cluster. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param cluster_name: The name of the Event Hubs Cluster. 
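Scoping the listing to a single resource group works the same way; the resource group name below is a placeholder. The `get` operation that follows retrieves one cluster by name, e.g. `client.clusters.get("my-resource-group", "my-eh-cluster")`.

```py
# Same ItemPaged pattern, restricted to one resource group.
for cluster in client.clusters.list_by_resource_group("my-resource-group"):
    print(cluster.id)
```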
+ :type cluster_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Cluster, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.Cluster + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.Cluster"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=50, min_length=6), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Cluster', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/clusters/{clusterName}'} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name, # type: str + cluster_name, # type: str + parameters, # type: "_models.Cluster" + **kwargs # type: Any + ): + # type: (...) 
-> Optional["_models.Cluster"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.Cluster"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._create_or_update_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=50, min_length=6), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'Cluster') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('Cluster', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('Cluster', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/clusters/{clusterName}'} # type: ignore + + def begin_create_or_update( + self, + resource_group_name, # type: str + cluster_name, # type: str + parameters, # type: "_models.Cluster" + **kwargs # type: Any + ): + # type: (...) -> LROPoller["_models.Cluster"] + """Creates or updates an instance of an Event Hubs Cluster. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param cluster_name: The name of the Event Hubs Cluster. + :type cluster_name: str + :param parameters: Parameters for creating a eventhub cluster resource. + :type parameters: ~azure.mgmt.eventhub.v2021_06_01_preview.models.Cluster + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. 
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns either Cluster or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.eventhub.v2021_06_01_preview.models.Cluster] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Cluster"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + cluster_name=cluster_name, + parameters=parameters, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('Cluster', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=50, min_length=6), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/clusters/{clusterName}'} # type: ignore + + def _update_initial( + self, + resource_group_name, # type: str + cluster_name, # type: str + parameters, # type: "_models.Cluster" + **kwargs # type: Any + ): + # type: (...) 
-> Optional["_models.Cluster"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.Cluster"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._update_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=50, min_length=6), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'Cluster') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('Cluster', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('Cluster', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/clusters/{clusterName}'} # type: ignore + + def begin_update( + self, + resource_group_name, # type: str + cluster_name, # type: str + parameters, # type: "_models.Cluster" + **kwargs # type: Any + ): + # type: (...) -> LROPoller["_models.Cluster"] + """Modifies mutable properties on the Event Hubs Cluster. This operation is idempotent. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param cluster_name: The name of the Event Hubs Cluster. + :type cluster_name: str + :param parameters: The properties of the Event Hubs Cluster which should be updated. + :type parameters: ~azure.mgmt.eventhub.v2021_06_01_preview.models.Cluster + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. 
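`begin_create_or_update` above wraps the initial PUT in an `LROPoller`, so callers can simply block on `.result()` instead of handling the 200/201/202 handshake themselves. A minimal sketch, assuming the `Cluster` and `ClusterSku` models are re-exported at `azure.mgmt.eventhub.models` and that the location and SKU values are placeholders:

```py
from azure.mgmt.eventhub.models import Cluster, ClusterSku

poller = client.clusters.begin_create_or_update(
    "my-resource-group",
    "my-eh-cluster",
    Cluster(
        location="eastus",
        sku=ClusterSku(name="Dedicated", capacity=1),  # placeholder sizing
    ),
)
cluster = poller.result()  # blocks until the ARM operation reaches a terminal state
print(cluster.id)
```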
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns either Cluster or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.eventhub.v2021_06_01_preview.models.Cluster] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Cluster"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._update_initial( + resource_group_name=resource_group_name, + cluster_name=cluster_name, + parameters=parameters, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('Cluster', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=50, min_length=6), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/clusters/{clusterName}'} # type: ignore + + def _delete_initial( + self, + resource_group_name, # type: str + cluster_name, # type: str + **kwargs # type: Any + ): + # type: (...) 
-> None + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self._delete_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=50, min_length=6), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/clusters/{clusterName}'} # type: ignore + + def begin_delete( + self, + resource_group_name, # type: str + cluster_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Deletes an existing Event Hubs Cluster. This operation is idempotent. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param cluster_name: The name of the Event Hubs Cluster. + :type cluster_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + cluster_name=cluster_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=50, min_length=6), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/clusters/{clusterName}'} # type: ignore + + def list_namespaces( + self, + resource_group_name, # type: str + cluster_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.EHNamespaceIdListResult" + """List all Event Hubs Namespace IDs in an Event Hubs Dedicated Cluster. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param cluster_name: The name of the Event Hubs Cluster. 
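`begin_update` and `begin_delete` follow the same poller pattern; a short sketch reusing the models and `client` from the earlier examples, with placeholder names:

```py
# Patch mutable properties (here, tags) and wait for the operation to finish.
updated = client.clusters.begin_update(
    "my-resource-group",
    "my-eh-cluster",
    Cluster(location="eastus", tags={"env": "test"}),
).result()

# begin_delete returns an LROPoller[None]; wait() blocks without returning a value.
client.clusters.begin_delete("my-resource-group", "my-eh-cluster").wait()
```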
+ :type cluster_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EHNamespaceIdListResult, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.EHNamespaceIdListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.EHNamespaceIdListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.list_namespaces.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=50, min_length=6), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('EHNamespaceIdListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_namespaces.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/clusters/{clusterName}/namespaces'} # type: ignore diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/operations/_configuration_operations.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/operations/_configuration_operations.py new file mode 100644 index 000000000000..19938d2c679f --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/operations/_configuration_operations.py @@ -0,0 +1,180 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
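For completeness, the `list_namespaces` operation at the end of the clusters group above returns the IDs of namespaces created inside a dedicated cluster. A sketch, assuming the result's `value` holds objects that carry an ARM resource `id`:

```py
id_list = client.clusters.list_namespaces("my-resource-group", "my-eh-cluster")
for item in id_list.value or []:
    print(item.id)  # e.g. /subscriptions/.../namespaces/<name>
```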
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class ConfigurationOperations(object): + """ConfigurationOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.eventhub.v2021_06_01_preview.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def patch( + self, + resource_group_name, # type: str + cluster_name, # type: str + parameters, # type: "_models.ClusterQuotaConfigurationProperties" + **kwargs # type: Any + ): + # type: (...) -> Optional["_models.ClusterQuotaConfigurationProperties"] + """Replace all specified Event Hubs Cluster settings with those contained in the request body. + Leaves the settings not specified in the request body unmodified. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param cluster_name: The name of the Event Hubs Cluster. + :type cluster_name: str + :param parameters: Parameters for creating an Event Hubs Cluster resource. 
+ :type parameters: ~azure.mgmt.eventhub.v2021_06_01_preview.models.ClusterQuotaConfigurationProperties + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ClusterQuotaConfigurationProperties, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.ClusterQuotaConfigurationProperties or None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ClusterQuotaConfigurationProperties"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.patch.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=50, min_length=6), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'ClusterQuotaConfigurationProperties') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('ClusterQuotaConfigurationProperties', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('ClusterQuotaConfigurationProperties', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + patch.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/clusters/{clusterName}/quotaConfiguration/default'} # type: ignore + + def get( + self, + resource_group_name, # type: str + cluster_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.ClusterQuotaConfigurationProperties" + """Get all Event Hubs Cluster settings - a collection of key/value pairs which represent the + quotas and settings imposed on the cluster. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param cluster_name: The name of the Event Hubs Cluster. 
+ :type cluster_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ClusterQuotaConfigurationProperties, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.ClusterQuotaConfigurationProperties + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ClusterQuotaConfigurationProperties"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=50, min_length=6), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ClusterQuotaConfigurationProperties', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/clusters/{clusterName}/quotaConfiguration/default'} # type: ignore diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/operations/_consumer_groups_operations.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/operations/_consumer_groups_operations.py new file mode 100644 index 000000000000..d853441eb212 --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/operations/_consumer_groups_operations.py @@ -0,0 +1,348 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
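The `ConfigurationOperations` group above treats cluster quota settings as a bag of key/value pairs. Reading and patching them might look like the sketch below, assuming the client surfaces the group as `configuration`; the setting key shown is illustrative, not a documented name.

```py
from azure.mgmt.eventhub.models import ClusterQuotaConfigurationProperties

# Read the current quota/settings collection for the cluster.
config = client.configuration.get("my-resource-group", "my-eh-cluster")
print(config.settings)

# Patch replaces only the keys supplied; unspecified settings stay unmodified.
client.configuration.patch(
    "my-resource-group",
    "my-eh-cluster",
    ClusterQuotaConfigurationProperties(settings={"example-setting": "20"}),
)
```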
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class ConsumerGroupsOperations(object): + """ConsumerGroupsOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.eventhub.v2021_06_01_preview.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def create_or_update( + self, + resource_group_name, # type: str + namespace_name, # type: str + event_hub_name, # type: str + consumer_group_name, # type: str + parameters, # type: "_models.ConsumerGroup" + **kwargs # type: Any + ): + # type: (...) -> "_models.ConsumerGroup" + """Creates or updates an Event Hubs consumer group as a nested resource within a Namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param event_hub_name: The Event Hub name. + :type event_hub_name: str + :param consumer_group_name: The consumer group name. + :type consumer_group_name: str + :param parameters: Parameters supplied to create or update a consumer group resource. 
+ :type parameters: ~azure.mgmt.eventhub.v2021_06_01_preview.models.ConsumerGroup + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ConsumerGroup, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.ConsumerGroup + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ConsumerGroup"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'eventHubName': self._serialize.url("event_hub_name", event_hub_name, 'str', max_length=256, min_length=1), + 'consumerGroupName': self._serialize.url("consumer_group_name", consumer_group_name, 'str', max_length=50, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'ConsumerGroup') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ConsumerGroup', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/eventhubs/{eventHubName}/consumergroups/{consumerGroupName}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + namespace_name, # type: str + event_hub_name, # type: str + consumer_group_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes a consumer group from the specified Event Hub and resource group. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param event_hub_name: The Event Hub name. 
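Creating a consumer group with the operation above takes the full resource path (resource group, namespace, event hub, consumer group) plus a `ConsumerGroup` body; the names below are placeholders and `user_metadata` is optional.

```py
from azure.mgmt.eventhub.models import ConsumerGroup

group = client.consumer_groups.create_or_update(
    "my-resource-group",
    "my-namespace",
    "my-eventhub",
    "my-consumer-group",
    ConsumerGroup(user_metadata="owned by the analytics pipeline"),
)
print(group.id)
```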
+ :type event_hub_name: str + :param consumer_group_name: The consumer group name. + :type consumer_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'eventHubName': self._serialize.url("event_hub_name", event_hub_name, 'str', max_length=256, min_length=1), + 'consumerGroupName': self._serialize.url("consumer_group_name", consumer_group_name, 'str', max_length=50, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/eventhubs/{eventHubName}/consumergroups/{consumerGroupName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + namespace_name, # type: str + event_hub_name, # type: str + consumer_group_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.ConsumerGroup" + """Gets a description for the specified consumer group. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param event_hub_name: The Event Hub name. + :type event_hub_name: str + :param consumer_group_name: The consumer group name. 
+ :type consumer_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ConsumerGroup, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.ConsumerGroup + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ConsumerGroup"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'eventHubName': self._serialize.url("event_hub_name", event_hub_name, 'str', max_length=256, min_length=1), + 'consumerGroupName': self._serialize.url("consumer_group_name", consumer_group_name, 'str', max_length=50, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ConsumerGroup', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/eventhubs/{eventHubName}/consumergroups/{consumerGroupName}'} # type: ignore + + def list_by_event_hub( + self, + resource_group_name, # type: str + namespace_name, # type: str + event_hub_name, # type: str + skip=None, # type: Optional[int] + top=None, # type: Optional[int] + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.ConsumerGroupListResult"] + """Gets all the consumer groups in a Namespace. An empty feed is returned if no consumer group + exists in the Namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param event_hub_name: The Event Hub name. + :type event_hub_name: str + :param skip: Skip is only used if a previous operation returned a partial result. If a previous + response contains a nextLink element, the value of the nextLink element will include a skip + parameter that specifies a starting point to use for subsequent calls. 
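`get` and `delete` on the same group are plain synchronous calls, and `list_by_event_hub`, defined next, pages through every consumer group on an event hub with optional `$skip`/`$top` parameters. A short sketch:

```py
cg = client.consumer_groups.get(
    "my-resource-group", "my-namespace", "my-eventhub", "my-consumer-group"
)
print(cg.name, cg.user_metadata)

# delete returns None on a successful 200/204 response.
client.consumer_groups.delete(
    "my-resource-group", "my-namespace", "my-eventhub", "my-consumer-group"
)
```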
+ :type skip: int + :param top: May be used to limit the number of results to the most recent N usageDetails. + :type top: int + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ConsumerGroupListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.eventhub.v2021_06_01_preview.models.ConsumerGroupListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ConsumerGroupListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_event_hub.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'eventHubName': self._serialize.url("event_hub_name", event_hub_name, 'str', max_length=256, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', maximum=1000, minimum=0) + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', maximum=1000, minimum=1) + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('ConsumerGroupListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_event_hub.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/eventhubs/{eventHubName}/consumergroups'} # type: ignore diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/operations/_disaster_recovery_configs_operations.py 
b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/operations/_disaster_recovery_configs_operations.py new file mode 100644 index 000000000000..58b5f03c8cb7 --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/operations/_disaster_recovery_configs_operations.py @@ -0,0 +1,728 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class DisasterRecoveryConfigsOperations(object): + """DisasterRecoveryConfigsOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.eventhub.v2021_06_01_preview.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def check_name_availability( + self, + resource_group_name, # type: str + namespace_name, # type: str + parameters, # type: "_models.CheckNameAvailabilityParameter" + **kwargs # type: Any + ): + # type: (...) -> "_models.CheckNameAvailabilityResult" + """Check the give Namespace name availability. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param parameters: Parameters to check availability of the given Alias name. 
+ :type parameters: ~azure.mgmt.eventhub.v2021_06_01_preview.models.CheckNameAvailabilityParameter + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CheckNameAvailabilityResult, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.CheckNameAvailabilityResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.CheckNameAvailabilityResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.check_name_availability.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'CheckNameAvailabilityParameter') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('CheckNameAvailabilityResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + check_name_availability.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/disasterRecoveryConfigs/checkNameAvailability'} # type: ignore + + def list( + self, + resource_group_name, # type: str + namespace_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.ArmDisasterRecoveryListResult"] + """Gets all Alias(Disaster Recovery configurations). + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. 
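`check_name_availability` above validates a prospective alias name before a Geo-DR configuration is created, and the `list` operation that follows pages through existing aliases just like the cluster listings earlier. A minimal sketch with placeholder names:

```py
from azure.mgmt.eventhub.models import CheckNameAvailabilityParameter

result = client.disaster_recovery_configs.check_name_availability(
    "my-resource-group",
    "my-namespace",
    CheckNameAvailabilityParameter(name="my-dr-alias"),
)
print(result.name_available, result.reason, result.message)
```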
+ :type namespace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ArmDisasterRecoveryListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.eventhub.v2021_06_01_preview.models.ArmDisasterRecoveryListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ArmDisasterRecoveryListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('ArmDisasterRecoveryListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/disasterRecoveryConfigs'} # type: ignore + + def create_or_update( + self, + resource_group_name, # type: str + namespace_name, # type: str + alias, # type: str + parameters, # type: "_models.ArmDisasterRecovery" + **kwargs # type: Any + ): + # type: (...) -> Optional["_models.ArmDisasterRecovery"] + """Creates or updates a new Alias(Disaster Recovery configuration). + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param alias: The Disaster Recovery configuration name. 
+ :type alias: str + :param parameters: Parameters required to create an Alias(Disaster Recovery configuration). + :type parameters: ~azure.mgmt.eventhub.v2021_06_01_preview.models.ArmDisasterRecovery + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ArmDisasterRecovery, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.ArmDisasterRecovery or None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ArmDisasterRecovery"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'alias': self._serialize.url("alias", alias, 'str', max_length=50, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'ArmDisasterRecovery') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('ArmDisasterRecovery', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/disasterRecoveryConfigs/{alias}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + namespace_name, # type: str + alias, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes an Alias(Disaster Recovery configuration). + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param alias: The Disaster Recovery configuration name. 
+ :type alias: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'alias': self._serialize.url("alias", alias, 'str', max_length=50, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/disasterRecoveryConfigs/{alias}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + namespace_name, # type: str + alias, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.ArmDisasterRecovery" + """Retrieves Alias(Disaster Recovery configuration) for primary or secondary namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param alias: The Disaster Recovery configuration name. 
+ :type alias: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ArmDisasterRecovery, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.ArmDisasterRecovery + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ArmDisasterRecovery"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'alias': self._serialize.url("alias", alias, 'str', max_length=50, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ArmDisasterRecovery', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/disasterRecoveryConfigs/{alias}'} # type: ignore + + def break_pairing( + self, + resource_group_name, # type: str + namespace_name, # type: str + alias, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """This operation disables the Disaster Recovery and stops replicating changes from primary to + secondary namespaces. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param alias: The Disaster Recovery configuration name. 
+ :type alias: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.break_pairing.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'alias': self._serialize.url("alias", alias, 'str', max_length=50, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + break_pairing.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/disasterRecoveryConfigs/{alias}/breakPairing'} # type: ignore + + def fail_over( + self, + resource_group_name, # type: str + namespace_name, # type: str + alias, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Invokes GEO DR failover and reconfigure the alias to point to the secondary namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param alias: The Disaster Recovery configuration name. 
+ :type alias: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.fail_over.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'alias': self._serialize.url("alias", alias, 'str', max_length=50, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + fail_over.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/disasterRecoveryConfigs/{alias}/failover'} # type: ignore + + def list_authorization_rules( + self, + resource_group_name, # type: str + namespace_name, # type: str + alias, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.AuthorizationRuleListResult"] + """Gets a list of authorization rules for a Namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param alias: The Disaster Recovery configuration name. 
+ :type alias: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either AuthorizationRuleListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.eventhub.v2021_06_01_preview.models.AuthorizationRuleListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.AuthorizationRuleListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_authorization_rules.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'alias': self._serialize.url("alias", alias, 'str', max_length=50, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('AuthorizationRuleListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_authorization_rules.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/disasterRecoveryConfigs/{alias}/authorizationRules'} # type: ignore + + def get_authorization_rule( + self, + resource_group_name, # type: str + namespace_name, # type: str + alias, # type: str + authorization_rule_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.AuthorizationRule" + """Gets an AuthorizationRule for a Namespace by rule name. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. 
+ :type namespace_name: str + :param alias: The Disaster Recovery configuration name. + :type alias: str + :param authorization_rule_name: The authorization rule name. + :type authorization_rule_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AuthorizationRule, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.AuthorizationRule + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.AuthorizationRule"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get_authorization_rule.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'alias': self._serialize.url("alias", alias, 'str', max_length=50, min_length=1), + 'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str', min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('AuthorizationRule', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_authorization_rule.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/disasterRecoveryConfigs/{alias}/authorizationRules/{authorizationRuleName}'} # type: ignore + + def list_keys( + self, + resource_group_name, # type: str + namespace_name, # type: str + alias, # type: str + authorization_rule_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.AccessKeys" + """Gets the primary and secondary connection strings for the Namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param alias: The Disaster Recovery configuration name. + :type alias: str + :param authorization_rule_name: The authorization rule name. 
+ :type authorization_rule_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AccessKeys, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.AccessKeys + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.AccessKeys"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.list_keys.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'alias': self._serialize.url("alias", alias, 'str', max_length=50, min_length=1), + 'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str', min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('AccessKeys', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/disasterRecoveryConfigs/{alias}/authorizationRules/{authorizationRuleName}/listKeys'} # type: ignore diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/operations/_event_hubs_operations.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/operations/_event_hubs_operations.py new file mode 100644 index 000000000000..11347bae096b --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/operations/_event_hubs_operations.py @@ -0,0 +1,766 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class EventHubsOperations(object): + """EventHubsOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.eventhub.v2021_06_01_preview.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_namespace( + self, + resource_group_name, # type: str + namespace_name, # type: str + skip=None, # type: Optional[int] + top=None, # type: Optional[int] + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.EventHubListResult"] + """Gets all the Event Hubs in a Namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param skip: Skip is only used if a previous operation returned a partial result. If a previous + response contains a nextLink element, the value of the nextLink element will include a skip + parameter that specifies a starting point to use for subsequent calls. + :type skip: int + :param top: May be used to limit the number of results to the most recent N usageDetails. 
+ :type top: int + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either EventHubListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.eventhub.v2021_06_01_preview.models.EventHubListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.EventHubListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_namespace.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', maximum=1000, minimum=0) + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', maximum=1000, minimum=1) + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('EventHubListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_namespace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/eventhubs'} # type: ignore + + def create_or_update( + self, + resource_group_name, # type: str + namespace_name, # type: str + event_hub_name, # type: str + parameters, # type: "_models.Eventhub" + **kwargs # type: Any + ): + # type: (...) -> "_models.Eventhub" + """Creates or updates a new Event Hub as a nested resource within a Namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. 
+ :type namespace_name: str + :param event_hub_name: The Event Hub name. + :type event_hub_name: str + :param parameters: Parameters supplied to create an Event Hub resource. + :type parameters: ~azure.mgmt.eventhub.v2021_06_01_preview.models.Eventhub + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Eventhub, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.Eventhub + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.Eventhub"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'eventHubName': self._serialize.url("event_hub_name", event_hub_name, 'str', max_length=256, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'Eventhub') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Eventhub', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/eventhubs/{eventHubName}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + namespace_name, # type: str + event_hub_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes an Event Hub from the specified Namespace and resource group. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param event_hub_name: The Event Hub name. 
+ :type event_hub_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'eventHubName': self._serialize.url("event_hub_name", event_hub_name, 'str', max_length=256, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/eventhubs/{eventHubName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + namespace_name, # type: str + event_hub_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.Eventhub" + """Gets an Event Hubs description for the specified Event Hub. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param event_hub_name: The Event Hub name. 
+ :type event_hub_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Eventhub, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.Eventhub + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.Eventhub"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'eventHubName': self._serialize.url("event_hub_name", event_hub_name, 'str', max_length=256, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Eventhub', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/eventhubs/{eventHubName}'} # type: ignore + + def list_authorization_rules( + self, + resource_group_name, # type: str + namespace_name, # type: str + event_hub_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.AuthorizationRuleListResult"] + """Gets the authorization rules for an Event Hub. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param event_hub_name: The Event Hub name. 
+ :type event_hub_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either AuthorizationRuleListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.eventhub.v2021_06_01_preview.models.AuthorizationRuleListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.AuthorizationRuleListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_authorization_rules.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'eventHubName': self._serialize.url("event_hub_name", event_hub_name, 'str', max_length=256, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('AuthorizationRuleListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_authorization_rules.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/eventhubs/{eventHubName}/authorizationRules'} # type: ignore + + def create_or_update_authorization_rule( + self, + resource_group_name, # type: str + namespace_name, # type: str + event_hub_name, # type: str + authorization_rule_name, # type: str + parameters, # type: "_models.AuthorizationRule" + **kwargs # type: Any + ): + # type: (...) -> "_models.AuthorizationRule" + """Creates or updates an AuthorizationRule for the specified Event Hub. Creation/update of the + AuthorizationRule will take a few seconds to take effect. 
+ + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param event_hub_name: The Event Hub name. + :type event_hub_name: str + :param authorization_rule_name: The authorization rule name. + :type authorization_rule_name: str + :param parameters: The shared access AuthorizationRule. + :type parameters: ~azure.mgmt.eventhub.v2021_06_01_preview.models.AuthorizationRule + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AuthorizationRule, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.AuthorizationRule + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.AuthorizationRule"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update_authorization_rule.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'eventHubName': self._serialize.url("event_hub_name", event_hub_name, 'str', max_length=256, min_length=1), + 'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str', min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'AuthorizationRule') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('AuthorizationRule', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update_authorization_rule.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/eventhubs/{eventHubName}/authorizationRules/{authorizationRuleName}'} # type: ignore + + def get_authorization_rule( + self, + resource_group_name, # type: str + 
namespace_name, # type: str + event_hub_name, # type: str + authorization_rule_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.AuthorizationRule" + """Gets an AuthorizationRule for an Event Hub by rule name. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param event_hub_name: The Event Hub name. + :type event_hub_name: str + :param authorization_rule_name: The authorization rule name. + :type authorization_rule_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AuthorizationRule, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.AuthorizationRule + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.AuthorizationRule"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get_authorization_rule.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'eventHubName': self._serialize.url("event_hub_name", event_hub_name, 'str', max_length=256, min_length=1), + 'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str', min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('AuthorizationRule', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_authorization_rule.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/eventhubs/{eventHubName}/authorizationRules/{authorizationRuleName}'} # type: ignore + + def delete_authorization_rule( + self, + resource_group_name, # type: str + namespace_name, # type: str + event_hub_name, # type: str + authorization_rule_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes an Event Hub AuthorizationRule. + + :param resource_group_name: Name of the resource group within the azure subscription. 
+ :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param event_hub_name: The Event Hub name. + :type event_hub_name: str + :param authorization_rule_name: The authorization rule name. + :type authorization_rule_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.delete_authorization_rule.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'eventHubName': self._serialize.url("event_hub_name", event_hub_name, 'str', max_length=256, min_length=1), + 'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str', min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete_authorization_rule.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/eventhubs/{eventHubName}/authorizationRules/{authorizationRuleName}'} # type: ignore + + def list_keys( + self, + resource_group_name, # type: str + namespace_name, # type: str + event_hub_name, # type: str + authorization_rule_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.AccessKeys" + """Gets the ACS and SAS connection strings for the Event Hub. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param event_hub_name: The Event Hub name. + :type event_hub_name: str + :param authorization_rule_name: The authorization rule name. 
+ :type authorization_rule_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AccessKeys, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.AccessKeys + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.AccessKeys"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.list_keys.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'eventHubName': self._serialize.url("event_hub_name", event_hub_name, 'str', max_length=256, min_length=1), + 'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str', min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('AccessKeys', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/eventhubs/{eventHubName}/authorizationRules/{authorizationRuleName}/listKeys'} # type: ignore + + def regenerate_keys( + self, + resource_group_name, # type: str + namespace_name, # type: str + event_hub_name, # type: str + authorization_rule_name, # type: str + parameters, # type: "_models.RegenerateAccessKeyParameters" + **kwargs # type: Any + ): + # type: (...) -> "_models.AccessKeys" + """Regenerates the ACS and SAS connection strings for the Event Hub. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param event_hub_name: The Event Hub name. + :type event_hub_name: str + :param authorization_rule_name: The authorization rule name. + :type authorization_rule_name: str + :param parameters: Parameters supplied to regenerate the AuthorizationRule Keys + (PrimaryKey/SecondaryKey). 
+ :type parameters: ~azure.mgmt.eventhub.v2021_06_01_preview.models.RegenerateAccessKeyParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AccessKeys, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.AccessKeys + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.AccessKeys"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.regenerate_keys.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'eventHubName': self._serialize.url("event_hub_name", event_hub_name, 'str', max_length=256, min_length=1), + 'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str', min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'RegenerateAccessKeyParameters') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('AccessKeys', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + regenerate_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/eventhubs/{eventHubName}/authorizationRules/{authorizationRuleName}/regenerateKeys'} # type: ignore diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/operations/_namespaces_operations.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/operations/_namespaces_operations.py new file mode 100644 index 000000000000..25caecd61014 --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/operations/_namespaces_operations.py @@ -0,0 +1,1164 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# 
Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class NamespacesOperations(object): + """NamespacesOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.eventhub.v2021_06_01_preview.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.EHNamespaceListResult"] + """Lists all the available Namespaces within a subscription, irrespective of the resource groups. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either EHNamespaceListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.eventhub.v2021_06_01_preview.models.EHNamespaceListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.EHNamespaceListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('EHNamespaceListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.EventHub/namespaces'} # type: ignore + + def list_by_resource_group( + self, + resource_group_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.EHNamespaceListResult"] + """Lists the available Namespaces within a resource group. + + :param resource_group_name: Name of the resource group within the azure subscription. 
+ :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either EHNamespaceListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.eventhub.v2021_06_01_preview.models.EHNamespaceListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.EHNamespaceListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_resource_group.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('EHNamespaceListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces'} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name, # type: str + namespace_name, # type: str + parameters, # type: "_models.EHNamespace" + **kwargs # type: Any + ): + # type: (...) 
-> Optional["_models.EHNamespace"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.EHNamespace"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._create_or_update_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'EHNamespace') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('EHNamespace', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('EHNamespace', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}'} # type: ignore + + def begin_create_or_update( + self, + resource_group_name, # type: str + namespace_name, # type: str + parameters, # type: "_models.EHNamespace" + **kwargs # type: Any + ): + # type: (...) -> LROPoller["_models.EHNamespace"] + """Creates or updates a namespace. Once created, this namespace's resource manifest is immutable. + This operation is idempotent. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param parameters: Parameters for creating a namespace resource. + :type parameters: ~azure.mgmt.eventhub.v2021_06_01_preview.models.EHNamespace + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. 
+ Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns either EHNamespace or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.eventhub.v2021_06_01_preview.models.EHNamespace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.EHNamespace"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + namespace_name=namespace_name, + parameters=parameters, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('EHNamespace', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}'} # type: ignore + + def _delete_initial( + self, + resource_group_name, # type: str + namespace_name, # type: str + **kwargs # type: Any + ): + # type: (...) 
-> None + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self._delete_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}'} # type: ignore + + def begin_delete( + self, + resource_group_name, # type: str + namespace_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Deletes an existing namespace. This operation also removes all associated resources under the + namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + namespace_name=namespace_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + namespace_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.EHNamespace" + """Gets the description of the specified namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. 
+ :type namespace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EHNamespace, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.EHNamespace + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.EHNamespace"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('EHNamespace', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}'} # type: ignore + + def update( + self, + resource_group_name, # type: str + namespace_name, # type: str + parameters, # type: "_models.EHNamespace" + **kwargs # type: Any + ): + # type: (...) -> Optional["_models.EHNamespace"] + """Creates or updates a namespace. Once created, this namespace's resource manifest is immutable. + This operation is idempotent. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param parameters: Parameters for updating a namespace resource. 
+ :type parameters: ~azure.mgmt.eventhub.v2021_06_01_preview.models.EHNamespace + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EHNamespace, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.EHNamespace or None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.EHNamespace"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'EHNamespace') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('EHNamespace', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('EHNamespace', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}'} # type: ignore + + def create_or_update_network_rule_set( + self, + resource_group_name, # type: str + namespace_name, # type: str + parameters, # type: "_models.NetworkRuleSet" + **kwargs # type: Any + ): + # type: (...) -> "_models.NetworkRuleSet" + """Create or update NetworkRuleSet for a Namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param parameters: The Namespace IpFilterRule. 
+ :type parameters: ~azure.mgmt.eventhub.v2021_06_01_preview.models.NetworkRuleSet + :keyword callable cls: A custom type or function that will be passed the direct response + :return: NetworkRuleSet, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.NetworkRuleSet + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkRuleSet"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update_network_rule_set.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'NetworkRuleSet') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('NetworkRuleSet', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update_network_rule_set.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/networkRuleSets/default'} # type: ignore + + def get_network_rule_set( + self, + resource_group_name, # type: str + namespace_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.NetworkRuleSet" + """Gets NetworkRuleSet for a Namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. 
+ :type namespace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: NetworkRuleSet, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.NetworkRuleSet + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkRuleSet"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get_network_rule_set.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('NetworkRuleSet', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_network_rule_set.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/networkRuleSets/default'} # type: ignore + + def list_authorization_rules( + self, + resource_group_name, # type: str + namespace_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.AuthorizationRuleListResult"] + """Gets a list of authorization rules for a Namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. 
+ :type namespace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either AuthorizationRuleListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.eventhub.v2021_06_01_preview.models.AuthorizationRuleListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.AuthorizationRuleListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_authorization_rules.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('AuthorizationRuleListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_authorization_rules.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/authorizationRules'} # type: ignore + + def create_or_update_authorization_rule( + self, + resource_group_name, # type: str + namespace_name, # type: str + authorization_rule_name, # type: str + parameters, # type: "_models.AuthorizationRule" + **kwargs # type: Any + ): + # type: (...) -> "_models.AuthorizationRule" + """Creates or updates an AuthorizationRule for a Namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param authorization_rule_name: The authorization rule name. 
+ :type authorization_rule_name: str + :param parameters: The shared access AuthorizationRule. + :type parameters: ~azure.mgmt.eventhub.v2021_06_01_preview.models.AuthorizationRule + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AuthorizationRule, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.AuthorizationRule + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.AuthorizationRule"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update_authorization_rule.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str', min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'AuthorizationRule') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('AuthorizationRule', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update_authorization_rule.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/authorizationRules/{authorizationRuleName}'} # type: ignore + + def delete_authorization_rule( + self, + resource_group_name, # type: str + namespace_name, # type: str + authorization_rule_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes an AuthorizationRule for a Namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param authorization_rule_name: The authorization rule name. 
+ :type authorization_rule_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.delete_authorization_rule.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str', min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete_authorization_rule.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/authorizationRules/{authorizationRuleName}'} # type: ignore + + def get_authorization_rule( + self, + resource_group_name, # type: str + namespace_name, # type: str + authorization_rule_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.AuthorizationRule" + """Gets an AuthorizationRule for a Namespace by rule name. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param authorization_rule_name: The authorization rule name. 
+ :type authorization_rule_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AuthorizationRule, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.AuthorizationRule + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.AuthorizationRule"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get_authorization_rule.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str', min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('AuthorizationRule', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_authorization_rule.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/authorizationRules/{authorizationRuleName}'} # type: ignore + + def list_keys( + self, + resource_group_name, # type: str + namespace_name, # type: str + authorization_rule_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.AccessKeys" + """Gets the primary and secondary connection strings for the Namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param authorization_rule_name: The authorization rule name. 
+ :type authorization_rule_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AccessKeys, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.AccessKeys + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.AccessKeys"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.list_keys.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str', min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('AccessKeys', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/authorizationRules/{authorizationRuleName}/listKeys'} # type: ignore + + def regenerate_keys( + self, + resource_group_name, # type: str + namespace_name, # type: str + authorization_rule_name, # type: str + parameters, # type: "_models.RegenerateAccessKeyParameters" + **kwargs # type: Any + ): + # type: (...) -> "_models.AccessKeys" + """Regenerates the primary or secondary connection strings for the specified Namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param authorization_rule_name: The authorization rule name. + :type authorization_rule_name: str + :param parameters: Parameters required to regenerate the connection string. 
+ :type parameters: ~azure.mgmt.eventhub.v2021_06_01_preview.models.RegenerateAccessKeyParameters
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: AccessKeys, or the result of cls(response)
+ :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.AccessKeys
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.AccessKeys"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-06-01-preview"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.regenerate_keys.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6),
+ 'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str', min_length=1),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'RegenerateAccessKeyParameters')
+ body_content_kwargs['content'] = body_content
+ request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('AccessKeys', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ regenerate_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/authorizationRules/{authorizationRuleName}/regenerateKeys'} # type: ignore
+
+ def check_name_availability(
+ self,
+ parameters, # type: "_models.CheckNameAvailabilityParameter"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "_models.CheckNameAvailabilityResult"
+ """Check the given Namespace name availability.
+
+ :param parameters: Parameters to check availability of the given Namespace name.
+ :type parameters: ~azure.mgmt.eventhub.v2021_06_01_preview.models.CheckNameAvailabilityParameter + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CheckNameAvailabilityResult, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.CheckNameAvailabilityResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.CheckNameAvailabilityResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.check_name_availability.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'CheckNameAvailabilityParameter') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('CheckNameAvailabilityResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + check_name_availability.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.EventHub/checkNameAvailability'} # type: ignore diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/operations/_operations.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/operations/_operations.py new file mode 100644 index 000000000000..ec879cb07bc7 --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/operations/_operations.py @@ -0,0 +1,110 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class Operations(object): + """Operations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.eventhub.v2021_06_01_preview.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.OperationListResult"] + """Lists all of the available Event Hub REST API operations. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either OperationListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.eventhub.v2021_06_01_preview.models.OperationListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('OperationListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in 
[200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/providers/Microsoft.EventHub/operations'} # type: ignore diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/operations/_private_endpoint_connections_operations.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/operations/_private_endpoint_connections_operations.py new file mode 100644 index 000000000000..bf8467f022c6 --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/operations/_private_endpoint_connections_operations.py @@ -0,0 +1,384 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class PrivateEndpointConnectionsOperations(object): + """PrivateEndpointConnectionsOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.eventhub.v2021_06_01_preview.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + resource_group_name, # type: str + namespace_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.PrivateEndpointConnectionListResult"] + """Gets the available PrivateEndpointConnections within a namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. 
+ :type namespace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either PrivateEndpointConnectionListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.eventhub.v2021_06_01_preview.models.PrivateEndpointConnectionListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnectionListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('PrivateEndpointConnectionListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/privateEndpointConnections'} # type: ignore + + def create_or_update( + self, + resource_group_name, # type: str + namespace_name, # type: str + private_endpoint_connection_name, # type: str + parameters, # type: "_models.PrivateEndpointConnection" + **kwargs # type: Any + ): + # type: (...) -> "_models.PrivateEndpointConnection" + """Creates or updates PrivateEndpointConnections of service namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param private_endpoint_connection_name: The PrivateEndpointConnection name. 
+ :type private_endpoint_connection_name: str + :param parameters: Parameters supplied to update Status of PrivateEndPoint Connection to + namespace resource. + :type parameters: ~azure.mgmt.eventhub.v2021_06_01_preview.models.PrivateEndpointConnection + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnection, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.PrivateEndpointConnection + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'PrivateEndpointConnection') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + def _delete_initial( + self, + resource_group_name, # type: str + namespace_name, # type: str + private_endpoint_connection_name, # type: str + **kwargs # type: Any + ): + # type: (...) 
-> None + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self._delete_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + def begin_delete( + self, + resource_group_name, # type: str + namespace_name, # type: str + private_endpoint_connection_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Deletes an existing namespace. This operation also removes all associated resources under the + namespace. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param private_endpoint_connection_name: The PrivateEndpointConnection name. + :type private_endpoint_connection_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + namespace_name=namespace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + namespace_name, # type: str + private_endpoint_connection_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.PrivateEndpointConnection" + """Gets a description for the specified Private Endpoint Connection name. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. + :type namespace_name: str + :param private_endpoint_connection_name: The PrivateEndpointConnection name. 
+ :type private_endpoint_connection_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnection, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.PrivateEndpointConnection + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/operations/_private_link_resources_operations.py b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/operations/_private_link_resources_operations.py new file mode 100644 index 000000000000..64875ade3b98 --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/operations/_private_link_resources_operations.py @@ -0,0 +1,105 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class PrivateLinkResourcesOperations(object): + """PrivateLinkResourcesOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.eventhub.v2021_06_01_preview.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def get( + self, + resource_group_name, # type: str + namespace_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.PrivateLinkResourcesListResult" + """Gets lists of resources that supports Privatelinks. + + :param resource_group_name: Name of the resource group within the azure subscription. + :type resource_group_name: str + :param namespace_name: The Namespace name. 
+ :type namespace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateLinkResourcesListResult, or the result of cls(response) + :rtype: ~azure.mgmt.eventhub.v2021_06_01_preview.models.PrivateLinkResourcesListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateLinkResourcesListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PrivateLinkResourcesListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/privateLinkResources'} # type: ignore diff --git a/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/py.typed b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/py.typed new file mode 100644 index 000000000000..e5aff4f83af8 --- /dev/null +++ b/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_06_01_preview/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. 
\ No newline at end of file diff --git a/sdk/eventhub/test-resources.json b/sdk/eventhub/test-resources.json index 3566eb9745f9..f66ea2ac3756 100644 --- a/sdk/eventhub/test-resources.json +++ b/sdk/eventhub/test-resources.json @@ -70,11 +70,14 @@ "eventHubsNamespace": "[concat('eh-', parameters('baseName'))]", "eventHubName": "[concat('eh-', parameters('baseName'), '-hub')]", "eventHubAuthRuleName": "[concat('eh-', parameters('baseName'), '-hub-auth-rule')]", - "storageAccount": "[concat('blb', parameters('baseName'))]", + "storageAccount": "[concat('storage', parameters('baseName'))]", "containerName": "your-blob-container-name", "defaultSASKeyName": "RootManageSharedAccessKey", "eventHubsAuthRuleResourceId": "[resourceId('Microsoft.EventHub/namespaces/authorizationRules', variables('eventHubsNamespace'), variables('defaultSASKeyName'))]", "storageAccountId": "[resourceId('Microsoft.Storage/storageAccounts', variables('storageAccount'))]", + "tablesMgmtApiVersion": "2019-04-01", + "tablesAuthorizationApiVersion": "2018-09-01-preview", + "tableDataContributorRoleId": "0a9a7e1f-b9d0-4cc4-a60d-0319b160aaa3" }, "resources": [ { @@ -140,6 +143,48 @@ } ] }, + { + "type": "Microsoft.DocumentDB/databaseAccounts", + "apiVersion": "2020-04-01", + "name": "[variables('storageAccount')]", + "location": "[parameters('location')]", + "tags": { + "defaultExperience": "Azure Table", + "hidden-cosmos-mmspecial": "", + "CosmosAccountType": "Non-Production" + }, + "kind": "GlobalDocumentDB", + "properties": { + "publicNetworkAccess": "Enabled", + "enableAutomaticFailover": false, + "enableMultipleWriteLocations": false, + "isVirtualNetworkFilterEnabled": false, + "virtualNetworkRules": [], + "disableKeyBasedMetadataWriteAccess": false, + "enableFreeTier": false, + "enableAnalyticalStorage": false, + "databaseAccountOfferType": "Standard", + "consistencyPolicy": { + "defaultConsistencyLevel": "BoundedStaleness", + "maxIntervalInSeconds": 86400, + "maxStalenessPrefix": 1000000 + }, + "locations": [ + { + "locationName": "[parameters('location')]", + "provisioningState": "Succeeded", + "failoverPriority": 0, + "isZoneRedundant": false + } + ], + "capabilities": [ + { + "name": "EnableTable" + } + ], + "ipRules": [] + } + }, { "type": "Microsoft.Authorization/roleAssignments", "apiVersion": "2019-04-01-preview", @@ -159,6 +204,15 @@ "principalId": "[parameters('testApplicationOid')]", "scope": "[resourceGroup().id]" } + }, + { + "type": "Microsoft.Authorization/roleAssignments", + "apiVersion": "[variables('tablesAuthorizationApiVersion')]", + "name": "[guid(concat('tableDataContributorRoleId', resourceGroup().id))]", + "properties": { + "roleDefinitionId": "[resourceId('Microsoft.Authorization/roleDefinitions', variables('tableDataContributorRoleId'))]", + "principalId": "[parameters('testApplicationOid')]" + } } ], "outputs": { @@ -197,6 +251,14 @@ "AZURE_STORAGE_ACCESS_KEY":{ "type": "string", "value": "[listKeys(variables('storageAccountId'), providers('Microsoft.Storage', 'storageAccounts').apiVersions[0]).keys[0].value]" + }, + "AZURE_TABLES_CONN_STR": { + "type": "string", + "value": "[concat('DefaultEndpointsProtocol=https;AccountName=', variables('storageAccount'), ';AccountKey=', listKeys(variables('storageAccountId'), providers('Microsoft.Storage', 'storageAccounts').apiVersions[0]).keys[0].value, ';EndpointSuffix=', parameters('storageEndpointSuffix'))]" + }, + "AZURE_COSMOS_CONN_STR": { + "type": "string", + "value": "[concat('DefaultEndpointsProtocol=https;AccountName=', 
variables('storageAccount'), ';AccountKey=', listKeys(resourceId('Microsoft.DocumentDB/databaseAccounts', variables('storageAccount')), '2020-04-01').primaryMasterKey, ';TableEndpoint=https://', variables('storageAccount'), '.table.cosmos.azure.com:443/')]" } } } diff --git a/sdk/identity/azure-identity/Troubleshoot.md b/sdk/identity/azure-identity/Troubleshoot.md new file mode 100644 index 000000000000..0b6b938a4168 --- /dev/null +++ b/sdk/identity/azure-identity/Troubleshoot.md @@ -0,0 +1,218 @@ +# Troubleshooting Azure Identity Authentication Issues + +The Azure Identity SDK offers various `TokenCredential` implementations. These implementations typically throw `CredentialUnavailableError` and `ClientAuthenticationError` exceptions. +The `CredentialUnavailableError` indicates that the credential cannot execute in the current environment setup due to lack of required configuration. +The `ClientAuthenticationError` indicates that the credential was able to run/execute but ran into an authentication issue from the server's end. This can happen due to invalid configuration/details passed in to the credential at construction time. +This troubleshooting guide covers mitigation steps to resolve these exceptions thrown by various `TokenCredential` implementations in the Azure Identity Python client library. + +## Table of contents + +- [Troubleshooting Default Azure Credential Authentication Issues](#troubleshooting-default-azure-credential-authentication-issues) +- [Troubleshooting Environment Credential Authentication Issues](#troubleshooting-environment-credential-authentication-issues) +- [Troubleshooting Service Principal Authentication Issues](#troubleshooting-service-principal-authentication-issues) +- [Troubleshooting Username Password Authentication Issues](#troubleshooting-username-password-authentication-issues) +- [Troubleshooting Managed Identity Authentication Issues](#troubleshooting-managed-identity-authentication-issues) +- [Troubleshooting Visual Studio Code Authentication Issues](#troubleshooting-visual-studio-code-authentication-issues) +- [Troubleshooting Azure CLI Authentication Issues](#troubleshooting-azure-cli-authentication-issues) +- [Troubleshooting Azure Powershell Authentication Issues](#troubleshooting-azure-powershell-authentication-issues) + +## Troubleshooting Default Azure Credential Authentication Issues + +### Credential Unavailable Error + +The `DefaultAzureCredential` attempts to retrieve an access token by sequentially invoking a chain of credentials. The `ClientAuthenticationError` in this scenario signifies that all the credentials in the chain failed to retrieve the token in the current environment setup/configuration. You need to follow the configuration instructions for the respective credential you're looking to use via `DefaultAzureCredential` chain, so that the credential can work in your environment. 
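+
+The aggregated error message lists each credential in the chain and the reason it failed, which usually points at the credential that needs to be configured. A minimal diagnostic sketch (assuming the azure-identity package is installed; the ARM scope is used only as an example) that surfaces this message looks like the following:
+
+```python
+from azure.core.exceptions import ClientAuthenticationError
+from azure.identity import DefaultAzureCredential
+
+credential = DefaultAzureCredential()
+try:
+    # Any scope your application needs works here; ARM is used only as an example.
+    token = credential.get_token("https://management.azure.com/.default")
+    print("Token acquired; expires on:", token.expires_on)
+except ClientAuthenticationError as error:
+    # The message enumerates every credential in the chain and why it failed.
+    print(error.message)
+```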
+
+Please follow the configuration instructions in the `Credential Unavailable Error` section of the troubleshooting guidelines below for the respective credential/authentication type you want to use via `DefaultAzureCredential`:
+
+| Credential Type | Troubleshoot Guide |
+| --- | --- |
+| Environment Credential | [Environment Credential Troubleshooting Guide](#troubleshooting-environment-credential-authentication-issues) |
+| Managed Identity Credential | [Managed Identity Troubleshooting Guide](#troubleshooting-managed-identity-authentication-issues) |
+| Visual Studio Code Credential | [Visual Studio Code Troubleshooting Guide](#troubleshooting-visual-studio-code-authentication-issues) |
+| Azure CLI Credential | [Azure CLI Troubleshooting Guide](#troubleshooting-azure-cli-authentication-issues) |
+| Azure Powershell Credential | [Azure Powershell Troubleshooting Guide](#troubleshooting-azure-powershell-authentication-issues) |
+
+## Troubleshooting Environment Credential Authentication Issues
+
+### Credential Unavailable Error
+
+#### Environment variables not configured
+
+The `EnvironmentCredential` supports Service Principal authentication and Username + Password authentication. To use either method with `EnvironmentCredential`, ensure the environment variables below are configured properly and that the application is able to read them.
+
+##### Service principal with secret
+
+| Variable Name | Value |
+| --- | --- |
+| AZURE_CLIENT_ID | ID of an Azure Active Directory application. |
+| AZURE_TENANT_ID | ID of the application's Azure Active Directory tenant. |
+| AZURE_CLIENT_SECRET | One of the application's client secrets. |
+
+##### Service principal with certificate
+
+| Variable name | Value |
+| --- | --- |
+| AZURE_CLIENT_ID | ID of an Azure Active Directory application. |
+| AZURE_TENANT_ID | ID of the application's Azure Active Directory tenant. |
+| AZURE_CLIENT_CERTIFICATE_PATH | Path to a PEM-encoded or PKCS12 certificate file including private key (without password protection). |
+
+##### Username and password
+
+| Variable name | Value |
+| --- | --- |
+| AZURE_CLIENT_ID | ID of an Azure Active Directory application. |
+| AZURE_USERNAME | A username (usually an email address). |
+| AZURE_PASSWORD | The associated password for the given username. |
+
+### Client Authentication Error
+
+The `EnvironmentCredential` supports Service Principal authentication and Username + Password authentication.
+Please follow the troubleshooting guidelines below for the authentication method you tried that failed.
+
+| Authentication Type | Troubleshoot Guide |
+| --- | --- |
+| Service Principal | [Service Principal Auth Troubleshooting Guide](#troubleshooting-service-principal-authentication-issues) |
+| Username Password | [Username Password Auth Troubleshooting Guide](#troubleshooting-username-password-authentication-issues) |
+
+## Troubleshooting Username Password Authentication Issues
+
+### Two Factor Authentication Required Error
+
+The `UsernamePassword` credential works only for users who have two-factor authentication disabled in Azure Active Directory. You can change a user's multi-factor authentication status in the Azure Portal by following the steps [here](https://docs.microsoft.com/azure/active-directory/authentication/howto-mfa-userstates#change-the-status-for-a-user).
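+
+If multi-factor authentication is disabled for the account, a minimal sketch of username/password authentication looks like the following. The client ID, tenant ID, username, and password shown are placeholders, and the ARM scope is used only as an example:
+
+```python
+from azure.identity import UsernamePasswordCredential
+
+# Placeholder values; supply your application's client ID, your tenant ID, and
+# the credentials of a user account that has multi-factor authentication disabled.
+credential = UsernamePasswordCredential(
+    client_id="<application-client-id>",
+    username="<username@contoso.com>",
+    password="<password>",
+    tenant_id="<tenant-id>",
+)
+
+# Request a token for any scope your application needs; ARM is used here as an example.
+token = credential.get_token("https://management.azure.com/.default")
+print("Token acquired; expires on:", token.expires_on)
+```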
+
+## Troubleshooting Service Principal Authentication Issues
+
+### Illegal/Invalid Argument Issues
+
+#### Client Id
+
+The client ID is the application ID of the registered application/service principal in Azure Active Directory.
+It is a required parameter for `ClientSecretCredential` and `ClientCertificateCredential`. If you have already created your service principal,
+you can retrieve the client (application) ID by following the instructions [here](https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal#get-tenant-and-app-id-values-for-signing-in).
+
+#### Tenant Id
+
+The tenant ID is the globally unique identifier (GUID) that identifies your organization. It is a required parameter for
+`ClientSecretCredential` and `ClientCertificateCredential`. If you have already created your service principal,
+you can retrieve the tenant ID by following the instructions [here](https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal#get-tenant-and-app-id-values-for-signing-in).
+
+### Client Secret Credential Issues
+
+#### Client Secret Argument
+
+The client secret is the secret string that the application uses to prove its identity when requesting a token; it can also be referred to as an application password.
+If you have already created a service principal, you can follow the instructions [here](https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal#option-2-create-a-new-application-secret) to create a client secret for your application.
+
+### Client Certificate Credential Issues
+
+#### Client Certificate Argument
+
+The `Client Certificate Credential` accepts `pfx` and `pem` certificates. The certificate needs to be associated with your registered application/service principal. To create and associate a certificate with your registered app, please follow the instructions [here](https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal#option-1-upload-a-certificate).
+
+### Create a new service principal
+
+Please follow the instructions [here](https://docs.microsoft.com/cli/azure/create-an-azure-service-principal-azure-cli) to create a new service principal.
+
+## Troubleshooting Managed Identity Authentication Issues
+
+### Credential Unavailable Error
+
+#### Connection Timed Out / Connection could not be established / Target Environment could not be determined
+
+azure-identity currently supports [managed identity authentication](https://docs.microsoft.com/azure/active-directory/managed-identities-azure-resources/overview)
+in the Azure services listed below. Ensure your application is running on one of these resources and that managed identity is enabled on
+it by following the instructions at the configuration links below.
+
+Azure Service | Managed Identity Configuration
+--- | --- |
+[Azure Virtual Machines](https://docs.microsoft.com/azure/active-directory/managed-identities-azure-resources/how-to-use-vm-token) | [Configuration Instructions](https://docs.microsoft.com/azure/active-directory/managed-identities-azure-resources/qs-configure-portal-windows-vm)
+[Azure App Service](https://docs.microsoft.com/azure/app-service/overview-managed-identity?tabs=python) | [Configuration Instructions](https://docs.microsoft.com/azure/app-service/overview-managed-identity?tabs=python)
+[Azure Kubernetes Service](https://docs.microsoft.com/azure/aks/use-managed-identity) | [Configuration Instructions](https://docs.microsoft.com/azure/aks/use-managed-identity)
+[Azure Cloud Shell](https://docs.microsoft.com/azure/cloud-shell/msi-authorization) | |
+[Azure Arc](https://docs.microsoft.com/azure/azure-arc/servers/managed-identity-authentication) | [Configuration Instructions](https://docs.microsoft.com/azure/azure-arc/servers/security-overview#using-a-managed-identity-with-arc-enabled-servers)
+[Azure Service Fabric](https://docs.microsoft.com/azure/service-fabric/concepts-managed-identity) | [Configuration Instructions](https://docs.microsoft.com/azure/service-fabric/configure-existing-cluster-enable-managed-identity-token-service)
+
+## Troubleshooting Visual Studio Code Authentication Issues
+
+### Credential Unavailable Error
+
+#### Failed To Read VS Code Credentials / Authenticate via Azure Tools plugin in VS Code
+
+The `VS Code Credential` failed to read the credential details from the cache.
+
+Visual Studio Code authentication is handled by an integration with the Azure Account extension.
+To use this form of authentication, ensure that you have installed the Azure Account extension,
+then use View > Command Palette to execute the Azure: Sign In command. This command opens a browser window and displays a page that allows you
+to sign in to Azure. After you've completed the login process, you can close the browser as directed. Running your application
+(either in the debugger or anywhere on the development machine) will use the credential from your sign-in.
+
+If you already have the Azure Account extension installed and have logged in to your account, try logging out and logging in again; that
+will repopulate the cache on disk and potentially mitigate the error you're getting.
+
+#### Msal Interaction Required Error
+
+The `VS Code Credential` was able to read the cached credentials, but the cached token is likely expired.
+Log in to the Azure Account extension via View > Command Palette to execute the Azure: Sign In command in the VS Code IDE.
+
+#### ADFS Tenant Not Supported
+
+ADFS tenants are not currently supported via the Azure Account extension in VS Code.
+The supported clouds are:
+
+Azure Cloud | Cloud Authority Host
+--- | --- |
+AZURE PUBLIC CLOUD | https://login.microsoftonline.com/
+AZURE GERMANY | https://login.microsoftonline.de/
+AZURE CHINA | https://login.chinacloudapi.cn/
+AZURE GOVERNMENT | https://login.microsoftonline.us/
+
+## Troubleshooting Azure CLI Authentication Issues
+
+### Credential Unavailable Error
+
+#### Azure CLI Not Installed
+
+To use the Azure CLI credential, the Azure CLI needs to be installed. Please follow the instructions [here](https://docs.microsoft.com/cli/azure/install-azure-cli)
+to install it for your platform, then try running the credential again; a minimal end-to-end check is sketched below.
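+
+Once the Azure CLI is installed and you have signed in with `az login`, the following sketch (assuming the azure-identity package is installed; the ARM scope is used only as an example) confirms that `AzureCliCredential` can obtain a token:
+
+```python
+from azure.identity import AzureCliCredential
+
+# Assumes the Azure CLI is installed and `az login` has completed successfully.
+credential = AzureCliCredential()
+
+# Any scope your application needs works here; ARM is used only as an example.
+token = credential.get_token("https://management.azure.com/.default")
+print("Azure CLI authentication succeeded; token expires on:", token.expires_on)
+```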
+
+#### Azure account not logged in
+
+`AzureCliCredential` authenticates as the identity currently logged in to the Azure CLI.
+You need to log in to your account via the `az login` command. For more details, see [Sign in with Azure CLI](https://docs.microsoft.com/cli/azure/authenticate-azure-cli).
+Once logged in, try running the credential again.
+
+### Illegal State
+
+#### Safe Working Directory Not Located
+
+The `Azure CLI Credential` was not able to locate a value for the `SystemRoot` system environment variable to execute in.
+Please ensure the `SystemRoot` environment variable points to a safe working directory, then try running the credential again.
+
+## Troubleshooting Azure Powershell Authentication Issues
+
+### Credential Unavailable Error
+
+#### Powershell not installed
+
+Please ensure PowerShell is installed on your platform by following the instructions [here](https://docs.microsoft.com/powershell/scripting/install/installing-powershell?view=powershell-7.1).
+
+#### Azure Az Module Not Installed
+
+Please follow the instructions [here](https://docs.microsoft.com/powershell/azure/install-az-ps)
+to install the Azure Az PowerShell module.
+
+#### Azure account not logged in
+
+Log in via the `Connect-AzAccount` command. See [Sign in with Azure Powershell](https://docs.microsoft.com/powershell/azure/authenticate-azureps) for more information.
+
+#### Deserialization error
+
+The `Azure Powershell Credential` was able to retrieve a response from Azure PowerShell when attempting to get an access token but failed
+to parse that response.
+In your local PowerShell window, run the following command to ensure that Azure PowerShell is returning an access token in the correct format.
+
+```pwsh
+Get-AzAccessToken -ResourceUrl ""
+```
+
+If the above command is not working properly, follow the instructions to resolve the Azure PowerShell issue, then try running the credential again.
diff --git a/sdk/identity/azure-identity/azure/identity/_credentials/azure_cli.py b/sdk/identity/azure-identity/azure/identity/_credentials/azure_cli.py
index 24e160d768c4..d535a286adb7 100644
--- a/sdk/identity/azure-identity/azure/identity/_credentials/azure_cli.py
+++ b/sdk/identity/azure-identity/azure/identity/_credentials/azure_cli.py
@@ -83,7 +83,10 @@ def get_token(self, *scopes, **kwargs):
         token = parse_token(output)
         if not token:
             sanitized_output = sanitize_output(output)
-            raise ClientAuthenticationError(message="Unexpected output from Azure CLI: '{}'".format(sanitized_output))
+            raise ClientAuthenticationError(
+                message="Unexpected output from Azure CLI: '{}'. 
\n" + "To mitigate this issue, please refer to the troubleshooting guidelines here at " + "https://aka.ms/azsdk/python/identity/azclicredential/troubleshoot.".format(sanitized_output)) return token diff --git a/sdk/identity/azure-identity/azure/identity/_credentials/azure_powershell.py b/sdk/identity/azure-identity/azure/identity/_credentials/azure_powershell.py index 8ce657c6e261..17869fbde253 100644 --- a/sdk/identity/azure-identity/azure/identity/_credentials/azure_powershell.py +++ b/sdk/identity/azure-identity/azure/identity/_credentials/azure_powershell.py @@ -119,7 +119,10 @@ def run_command_line(command_line): # (handling Exception here because subprocess.SubprocessError and .TimeoutExpired were added in 3.3) if proc and not proc.returncode: proc.kill() - error = CredentialUnavailableError(message="Failed to invoke PowerShell") + error = CredentialUnavailableError( + message="Failed to invoke PowerShell.\n" + "To mitigate this issue, please refer to the troubleshooting guidelines here at " + "https://aka.ms/azsdk/python/identity/powershellcredential/troubleshoot.") six.raise_from(error, ex) raise_for_error(proc.returncode, stdout, stderr) diff --git a/sdk/identity/azure-identity/azure/identity/_credentials/chained.py b/sdk/identity/azure-identity/azure/identity/_credentials/chained.py index 35936acb7679..6002b87d82f5 100644 --- a/sdk/identity/azure-identity/azure/identity/_credentials/chained.py +++ b/sdk/identity/azure-identity/azure/identity/_credentials/chained.py @@ -101,6 +101,8 @@ def get_token(self, *scopes, **kwargs): # pylint:disable=unused-argument within_credential_chain.set(False) attempts = _get_error_message(history) - message = self.__class__.__name__ + " failed to retrieve a token from the included credentials." + attempts + message = self.__class__.__name__ + " failed to retrieve a token from the included credentials." + attempts \ + + "\nTo mitigate this issue, please refer to the troubleshooting guidelines here at " \ + "https://aka.ms/azsdk/python/identity/defaultazurecredential/troubleshoot." _LOGGER.warning(message) raise ClientAuthenticationError(message=message) diff --git a/sdk/identity/azure-identity/azure/identity/_credentials/environment.py b/sdk/identity/azure-identity/azure/identity/_credentials/environment.py index c874646218de..8d0e7401d8b2 100644 --- a/sdk/identity/azure-identity/azure/identity/_credentials/environment.py +++ b/sdk/identity/azure-identity/azure/identity/_credentials/environment.py @@ -132,7 +132,9 @@ def get_token(self, *scopes, **kwargs): # pylint:disable=unused-argument """ if not self._credential: message = ( - "EnvironmentCredential authentication unavailable. Environment variables are not fully configured." + "EnvironmentCredential authentication unavailable. Environment variables are not fully configured.\n" + "Visit https://aka.ms/azsdk/python/identity/environmentcredential/troubleshoot to troubleshoot." + "this issue." 
) raise CredentialUnavailableError(message=message) return self._credential.get_token(*scopes, **kwargs) diff --git a/sdk/identity/azure-identity/azure/identity/_credentials/managed_identity.py b/sdk/identity/azure-identity/azure/identity/_credentials/managed_identity.py index d602c4d4d4e3..c1ce731dd21c 100644 --- a/sdk/identity/azure-identity/azure/identity/_credentials/managed_identity.py +++ b/sdk/identity/azure-identity/azure/identity/_credentials/managed_identity.py @@ -110,5 +110,10 @@ def get_token(self, *scopes, **kwargs): """ if not self._credential: - raise CredentialUnavailableError(message="No managed identity endpoint found.") + raise CredentialUnavailableError( + message="No managed identity endpoint found. \n" + "The Target Azure platform could not be determined from environment variables. \n" + "Visit https://aka.ms/azsdk/python/identity/managedidentitycredential/troubleshoot to " + "troubleshoot this issue." + ) return self._credential.get_token(*scopes, **kwargs) diff --git a/sdk/identity/azure-identity/azure/identity/aio/_credentials/azure_cli.py b/sdk/identity/azure-identity/azure/identity/aio/_credentials/azure_cli.py index d01a3619462d..944a2211d023 100644 --- a/sdk/identity/azure-identity/azure/identity/aio/_credentials/azure_cli.py +++ b/sdk/identity/azure-identity/azure/identity/aio/_credentials/azure_cli.py @@ -71,7 +71,10 @@ async def get_token(self, *scopes: str, **kwargs: "Any") -> "AccessToken": token = parse_token(output) if not token: sanitized_output = sanitize_output(output) - raise ClientAuthenticationError(message="Unexpected output from Azure CLI: '{}'".format(sanitized_output)) + raise ClientAuthenticationError( + message="Unexpected output from Azure CLI: '{}'. \n" + "To mitigate this issue, please refer to the troubleshooting guidelines here at " + "https://aka.ms/azsdk/python/identity/azclicredential/troubleshoot.".format(sanitized_output)) return token diff --git a/sdk/identity/azure-identity/azure/identity/aio/_credentials/azure_powershell.py b/sdk/identity/azure-identity/azure/identity/aio/_credentials/azure_powershell.py index c99433bb13d4..cfb3cd4331a1 100644 --- a/sdk/identity/azure-identity/azure/identity/aio/_credentials/azure_powershell.py +++ b/sdk/identity/azure-identity/azure/identity/aio/_credentials/azure_powershell.py @@ -83,11 +83,17 @@ async def run_command_line(command_line: "List[str]") -> str: except OSError as ex: # failed to execute "cmd" or "/bin/sh"; Azure PowerShell may or may not be installed - error = CredentialUnavailableError(message='Failed to execute "{}"'.format(command_line[0])) + error = CredentialUnavailableError( + message='Failed to execute "{}".\n' + 'To mitigate this issue, please refer to the troubleshooting guidelines here at ' + 'https://aka.ms/azsdk/python/identity/powershellcredential/troubleshoot.'.format(command_line[0])) raise error from ex except asyncio.TimeoutError as ex: proc.kill() - raise CredentialUnavailableError(message="Timed out waiting for Azure PowerShell") from ex + raise CredentialUnavailableError( + message="Timed out waiting for Azure PowerShell.\n" + "To mitigate this issue, please refer to the troubleshooting guidelines here at " + "https://aka.ms/azsdk/python/identity/powershellcredential/troubleshoot.") from ex decoded_stdout = stdout.decode() diff --git a/sdk/identity/azure-identity/azure/identity/aio/_credentials/chained.py b/sdk/identity/azure-identity/azure/identity/aio/_credentials/chained.py index c0e86ebc3397..c0a6a9ba5b89 100644 --- 
a/sdk/identity/azure-identity/azure/identity/aio/_credentials/chained.py +++ b/sdk/identity/azure-identity/azure/identity/aio/_credentials/chained.py @@ -78,5 +78,7 @@ async def get_token(self, *scopes: str, **kwargs: "Any") -> "AccessToken": within_credential_chain.set(False) attempts = _get_error_message(history) - message = self.__class__.__name__ + " failed to retrieve a token from the included credentials." + attempts + message = self.__class__.__name__ + " failed to retrieve a token from the included credentials." + attempts \ + + "\nTo mitigate this issue, please refer to the troubleshooting guidelines here at " \ + "https://aka.ms/azsdk/python/identity/defaultazurecredential/troubleshoot." raise ClientAuthenticationError(message=message) diff --git a/sdk/identity/azure-identity/azure/identity/aio/_credentials/environment.py b/sdk/identity/azure-identity/azure/identity/aio/_credentials/environment.py index 61a0dc795c89..d4c0bdff2047 100644 --- a/sdk/identity/azure-identity/azure/identity/aio/_credentials/environment.py +++ b/sdk/identity/azure-identity/azure/identity/aio/_credentials/environment.py @@ -100,7 +100,9 @@ async def get_token(self, *scopes: str, **kwargs: "Any") -> "AccessToken": """ if not self._credential: message = ( - "EnvironmentCredential authentication unavailable. Environment variables are not fully configured." + "EnvironmentCredential authentication unavailable. Environment variables are not fully configured.\n" + "Visit https://aka.ms/azsdk/python/identity/environmentcredential/troubleshoot to troubleshoot." + "this issue." ) raise CredentialUnavailableError(message=message) return await self._credential.get_token(*scopes, **kwargs) diff --git a/sdk/identity/azure-identity/azure/identity/aio/_credentials/managed_identity.py b/sdk/identity/azure-identity/azure/identity/aio/_credentials/managed_identity.py index dabb1be0b21f..caaf8a120398 100644 --- a/sdk/identity/azure-identity/azure/identity/aio/_credentials/managed_identity.py +++ b/sdk/identity/azure-identity/azure/identity/aio/_credentials/managed_identity.py @@ -103,5 +103,10 @@ async def get_token(self, *scopes: str, **kwargs: "Any") -> "AccessToken": :raises ~azure.identity.CredentialUnavailableError: managed identity isn't available in the hosting environment """ if not self._credential: - raise CredentialUnavailableError(message="No managed identity endpoint found.") + raise CredentialUnavailableError( + message="No managed identity endpoint found. \n" + "The Target Azure platform could not be determined from environment variables. " + "Visit https://aka.ms/azsdk/python/identity/managedidentitycredential/troubleshoot to " + "troubleshoot this issue." + ) return await self._credential.get_token(*scopes, **kwargs) diff --git a/sdk/keyvault/test-resources.json b/sdk/keyvault/test-resources.json index d22bd550fd57..cadd04d8b6d5 100644 --- a/sdk/keyvault/test-resources.json +++ b/sdk/keyvault/test-resources.json @@ -23,6 +23,12 @@ "description": "The client OID to grant access to test resources." } }, + "provisionerApplicationOid": { + "type": "string", + "metadata": { + "description": "The provisioner OID to grant access to test resources." 
+ } + }, "location": { "type": "string", "defaultValue": "[resourceGroup().location]", @@ -188,9 +194,7 @@ }, "properties": { "tenantId": "[parameters('tenantId')]", - "initialAdminObjectIds": [ - "[parameters('testApplicationOid')]" - ], + "initialAdminObjectIds": "[union(array(parameters('testApplicationOid')), array(parameters('provisionerApplicationOid')))]", "enablePurgeProtection": false, "enableSoftDelete": true, "publicNetworkAccess": "Enabled", diff --git a/sdk/monitor/azure-monitor-opentelemetry-exporter/samples/traces/README.md b/sdk/monitor/azure-monitor-opentelemetry-exporter/samples/traces/README.md index bee877a49cf8..d47b1327cf86 100644 --- a/sdk/monitor/azure-monitor-opentelemetry-exporter/samples/traces/README.md +++ b/sdk/monitor/azure-monitor-opentelemetry-exporter/samples/traces/README.md @@ -14,6 +14,7 @@ These code samples show common champion scenario operations with the AzureMonito * Azure Service Bus Receive: [sample_servicebus_receive.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-opentelemetry-exporter/samples/traces/sample_servicebus_receive.py) * Azure Storage Blob Create Container: [sample_storage.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-opentelemetry-exporter/samples/traces/sample_storage.py) * Client: [sample_client.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-opentelemetry-exporter/samples/traces/sample_client.py) +* Jaeger: [sample_jaeger.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-opentelemetry-exporter/samples/traces/sample_jaeger.py) * Trace: [sample_trace.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-opentelemetry-exporter/samples/traces/sample_trace.py) * Server: [sample_server.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-opentelemetry-exporter/samples/traces/sample_server.py) @@ -63,6 +64,60 @@ $ python sample_server.py * Open http://localhost:8080/ +### Jaeger + +* The Jaeger project provides an all-in-one Docker container with a UI, database, and consumer. Run the following command to start Jaeger: + +```sh +$ docker run -p 16686:16686 -p 6831:6831/udp jaegertracing/all-in-one +``` + +* This command starts Jaeger locally on port 16686 and exposes the Jaeger thrift agent on port 6831. You can visit Jaeger at http://localhost:16686. + +* Install the OpenTelemetry Jaeger Exporter + +```sh +$ pip install opentelemetry-exporter-jaeger +``` + +* Update `APPLICATIONINSIGHTS_CONNECTION_STRING` environment variable + +* Run the sample + +```sh +$ # from this directory +$ python sample_jaeger.py +``` + +* You should be able to see your traces in the Jaeger backend as well as Azure Monitor application insights backend. + +### Collector + +* Start the Collector locally to see how the Collector works in practice. + +* From the same folder as collector/otel-collector-config.yaml and collector/docker-compose.yml, start the Docker container. + +```sh +$ docker-compose up +``` + +* Install the OpenTelemetry OTLP Exporter + +```sh +$ pip install opentelemetry-exporter-otlp +``` + +* Update `APPLICATIONINSIGHTS_CONNECTION_STRING` environment variable + +* Run the sample + +```sh +# from collector directory +$ python sample_collector.py +``` + +* You should be able to see your traces in the Zipkin backend as well as Azure Monitor application insights backend. 
+ ### Azure Service Bus Send The following sample assumes that you have setup an Azure Service Bus [namespace](https://docs.microsoft.com/azure/service-bus-messaging/service-bus-quickstart-portal). diff --git a/sdk/monitor/azure-monitor-opentelemetry-exporter/samples/traces/collector/docker-compose.yml b/sdk/monitor/azure-monitor-opentelemetry-exporter/samples/traces/collector/docker-compose.yml new file mode 100644 index 000000000000..711b913453e8 --- /dev/null +++ b/sdk/monitor/azure-monitor-opentelemetry-exporter/samples/traces/collector/docker-compose.yml @@ -0,0 +1,18 @@ +version: "2" +services: + + # Zipkin + zipkin-all-in-one: + image: openzipkin/zipkin:latest + ports: + - "9411:9411" + + otel-collector: + image: otel/opentelemetry-collector:latest + command: ["--config=/etc/otel-collector-config.yaml", "${OTELCOL_ARGS}"] + volumes: + - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml + ports: + - "4317:4317" # OTLP gRPC receiver + depends_on: + - zipkin-all-in-one diff --git a/sdk/monitor/azure-monitor-opentelemetry-exporter/samples/traces/collector/otel-collector-config.yaml b/sdk/monitor/azure-monitor-opentelemetry-exporter/samples/traces/collector/otel-collector-config.yaml new file mode 100644 index 000000000000..9613f66ada52 --- /dev/null +++ b/sdk/monitor/azure-monitor-opentelemetry-exporter/samples/traces/collector/otel-collector-config.yaml @@ -0,0 +1,21 @@ +receivers: + otlp: + protocols: + grpc: + http: +exporters: + logging: + loglevel: debug + + zipkin: + endpoint: "http://zipkin-all-in-one:9411/api/v2/spans" + format: proto + +processors: + batch: +service: + pipelines: + traces: + receivers: [otlp] + exporters: [logging, zipkin] + processors: [batch] diff --git a/sdk/monitor/azure-monitor-opentelemetry-exporter/samples/traces/collector/sample_collector.py b/sdk/monitor/azure-monitor-opentelemetry-exporter/samples/traces/collector/sample_collector.py new file mode 100644 index 000000000000..a042c120eb48 --- /dev/null +++ b/sdk/monitor/azure-monitor-opentelemetry-exporter/samples/traces/collector/sample_collector.py @@ -0,0 +1,35 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +""" +An example to show an application using Opentelemetry tracing api and sdk with the OpenTelemetry Collector +and the Azure monitor exporter. +Telemetry is exported to application insights with the AzureMonitorTraceExporter and Zipkin with the +OTLP Span exporter. +""" +import os +from opentelemetry import trace + +from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter +from opentelemetry.sdk.resources import SERVICE_NAME, Resource +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchSpanProcessor + +from azure.monitor.opentelemetry.exporter import AzureMonitorTraceExporter + +trace.set_tracer_provider( + TracerProvider( + resource=Resource.create({SERVICE_NAME: "my-zipkin-service"}) + ) +) +tracer = trace.get_tracer(__name__) + +exporter = AzureMonitorTraceExporter.from_connection_string( + os.environ["APPLICATIONINSIGHTS_CONNECTION_STRING"] +) +otlp_exporter = OTLPSpanExporter(endpoint="http://localhost:4317") +span_processor = BatchSpanProcessor(otlp_exporter) +trace.get_tracer_provider().add_span_processor(span_processor) + +with tracer.start_as_current_span("test"): + print("Hello world!") +input(...) 
\ No newline at end of file diff --git a/sdk/monitor/azure-monitor-opentelemetry-exporter/samples/traces/sample_jaeger.py b/sdk/monitor/azure-monitor-opentelemetry-exporter/samples/traces/sample_jaeger.py new file mode 100644 index 000000000000..ae6a4f43e764 --- /dev/null +++ b/sdk/monitor/azure-monitor-opentelemetry-exporter/samples/traces/sample_jaeger.py @@ -0,0 +1,37 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +""" +An example to show an application using Opentelemetry tracing api and sdk with multiple exporters. +Telemetry is exported to application insights with the AzureMonitorTraceExporter and Jaeger backend with the JaegerExporter. +""" +import os +from opentelemetry import trace +from opentelemetry.exporter.jaeger.thrift import JaegerExporter +from opentelemetry.sdk.resources import SERVICE_NAME, Resource +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchSpanProcessor + +from azure.monitor.opentelemetry.exporter import AzureMonitorTraceExporter + + +exporter = AzureMonitorTraceExporter.from_connection_string( + os.environ["APPLICATIONINSIGHTS_CONNECTION_STRING"] +) + +jaeger_exporter = JaegerExporter( + agent_host_name="localhost", + agent_port=6831, +) + +# Service name needs to be populated for Jaeger to see traces +trace.set_tracer_provider( + TracerProvider( + resource=Resource.create({SERVICE_NAME: "my-jaeger-service"}) + ) +) +tracer = trace.get_tracer(__name__) +span_processor = BatchSpanProcessor(exporter) +trace.get_tracer_provider().add_span_processor(span_processor) + +with tracer.start_as_current_span("hello"): + print("Hello, World!") diff --git a/sdk/monitor/azure-monitor-query/CHANGELOG.md b/sdk/monitor/azure-monitor-query/CHANGELOG.md index 8e847161dfb3..5f2d861f2b22 100644 --- a/sdk/monitor/azure-monitor-query/CHANGELOG.md +++ b/sdk/monitor/azure-monitor-query/CHANGELOG.md @@ -1,5 +1,23 @@ # Release History +## 1.0.0b5 (Unreleased) + +### Features Added + +- Added `QueryPartialErrorException` and `LogsQueryError` to handle errors. +- Added `partial_error` and `is_error` attributes to `LogsQueryResult`. +- Added an option `allow_partial_errors` that defaults to False, which can be set to not throw if there are any partial errors. +- Added a new `LogsTableRow` type that represents a single row in a table. + +### Breaking Changes + +- `LogsQueryResult` now iterates over the tables directly as a convinience. +- `metric_namespace` is renamed to `namespace` and is a keyword-only argument in `list_metric_definitions` API. + +### Bugs Fixed + +### Other Changes + ## 1.0.0b4 (2021-09-09) ### Features Added diff --git a/sdk/monitor/azure-monitor-query/README.md b/sdk/monitor/azure-monitor-query/README.md index 16ed4a04e214..82ea4e8cfa0d 100644 --- a/sdk/monitor/azure-monitor-query/README.md +++ b/sdk/monitor/azure-monitor-query/README.md @@ -1,38 +1,43 @@ # Azure Monitor Query client library for Python -Azure Monitor helps you maximize the availability and performance of your apps. It delivers a comprehensive solution for collecting, analyzing, and acting on telemetry from your cloud and on-premises environments. 
+The Azure Monitor Query client library is used to execute read-only queries against [Azure Monitor][azure_monitor_overview]'s two data platforms: -All data collected by Azure Monitor fits into one of two fundamental types: +- [Logs](https://docs.microsoft.com/azure/azure-monitor/logs/data-platform-logs) - Collects and organizes log and performance data from monitored resources. Data from different sources such as platform logs from Azure services, log and performance data from virtual machines agents, and usage and performance data from apps can be consolidated into a single [Azure Log Analytics workspace](https://docs.microsoft.com/azure/azure-monitor/logs/data-platform-logs#log-analytics-workspaces). The various data types can be analyzed together using the [Kusto Query Language][kusto_query_language]. +- [Metrics](https://docs.microsoft.com/azure/azure-monitor/essentials/data-platform-metrics) - Collects numeric data from monitored resources into a time series database. Metrics are numerical values that are collected at regular intervals and describe some aspect of a system at a particular time. Metrics are lightweight and capable of supporting near real-time scenarios, making them particularly useful for alerting and fast detection of issues. -- **Metrics** - Numerical values that describe some aspect of a system at a particular time. They're lightweight and can support near real-time scenarios. -- **Logs** - Disparate types of data organized into records with different sets of properties for each type. Performance data and telemetry such as events, exceptions, and traces are stored as logs. +**Resources:** -To programmatically analyze these data sources, the Azure Monitor Query client library can be used. - -[Source code][python-query-src] | [Package (PyPI)][python-query-pypi] | [API reference documentation][python-query-ref-docs] | [Product documentation][python-query-product-docs] | [Samples][python-query-samples] | [Changelog][python-query-changelog] +- [Source code][source] +- [Package (PyPI)][package] +- [API reference documentation][python-query-ref-docs] +- [Service documentation][azure_monitor_overview] +- [Samples][samples] +- [Change log][changelog] ## Getting started ### Prerequisites -- Python 2.7, or 3.6 or later. -- An [Azure subscription][azure_subscription]. +- Python 2.7, or 3.6 or later +- An [Azure subscription][azure_subscription] +- To query Logs, you need an [Azure Log Analytics workspace][azure_monitor_create_using_portal]. +- To query Metrics, you need an Azure resource of any kind (Storage Account, Key Vault, Cosmos DB, etc.). ### Install the package Install the Azure Monitor Query client library for Python with [pip][pip]: ```bash -pip install azure-monitor-query --pre +pip install azure-monitor-query ``` ### Create the client -To interact with the Azure Monitor service, create an instance of a token credential. Use that instance when creating a `LogsQueryClient` or `MetricsQueryClient`. +An authenticated client is required to query Logs or Metrics. The library includes both synchronous and asynchronous forms of the clients. To authenticate, create an instance of a token credential. Use that instance when creating a `LogsQueryClient` or `MetricsQueryClient`. The following examples use `DefaultAzureCredential` from the [azure-identity](https://pypi.org/project/azure-identity/) package. 
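Any other `azure-identity` credential can be substituted for `DefaultAzureCredential`. For instance, the live tests in this repository authenticate with `ClientSecretCredential`; a minimal sketch of that approach is shown below. The environment variable names are just the convention those tests use, not a requirement of the library.

```python
import os
from azure.identity import ClientSecretCredential
from azure.monitor.query import LogsQueryClient

# Service principal credentials read from the environment, mirroring the test suite.
credential = ClientSecretCredential(
    tenant_id=os.environ["AZURE_TENANT_ID"],
    client_id=os.environ["AZURE_CLIENT_ID"],
    client_secret=os.environ["AZURE_CLIENT_SECRET"],
)
logs_client = LogsQueryClient(credential)
```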
#### Synchronous clients -Consider the following example, which creates synchronous clients for both logs and metrics querying: +Consider the following example, which creates synchronous clients for both Logs and Metrics querying: ```python from azure.identity import DefaultAzureCredential @@ -56,37 +61,23 @@ async_logs_client = LogsQueryClient(credential) async_metrics_client = MetricsQueryClient(credential) ``` -## Key concepts - -### Logs - -Azure Monitor Logs collects and organizes log and performance data from monitored resources. Data from different sources can be consolidated into a single workspace. Examples of data sources include: - -- Platform logs from Azure services. -- Log and performance data from virtual machine agents. -- Usage and performance data from apps. +### Execute the query -#### Azure Log Analytics workspaces +For examples of Logs and Metrics queries, see the [Examples](#examples) section. -Data collected by Azure Monitor Logs is stored in one or more [Log Analytics workspaces](https://docs.microsoft.com/azure/azure-monitor/logs/data-platform-logs#log-analytics-workspaces). The workspace defines the: - -- Geographic location of the data. -- Access rights defining which users can access data. -- Configuration settings, such as the pricing tier and data retention. - -#### Log queries +## Key concepts -Data from the disparate sources can be analyzed together using [Kusto Query Language (KQL)](https://docs.microsoft.com/azure/data-explorer/kusto/query/)—the same query language used by [Azure Data Explorer](https://docs.microsoft.com/azure/data-explorer/data-explorer-overview). Data is retrieved from a Log Analytics workspace using a KQL query—a read-only request to process data and return results. For more information, see [Log queries in Azure Monitor](https://docs.microsoft.com/azure/azure-monitor/logs/log-query-overview). +### Logs query rate limits and throttling -### Metrics +Each Azure Active Directory user is able to make up to 200 requests per 30 seconds, with no cap on the total calls per day. If requests are made at a rate higher than this, these requests will receive HTTP status code 429 (Too Many Requests) along with the `Retry-After: ` header. The header indicates the number of seconds until requests to this app are likely to be accepted. -Azure Monitor Metrics collects numeric data from monitored resources into a time series database. Metrics are collected at regular intervals and describe some aspect of a system at a particular time. Metrics in Azure Monitor are lightweight and can support near real-time scenarios. They're useful for alerting and fast detection of issues. Metrics can be: +In addition to call rate limits and daily quota caps, there are limits on queries themselves. Queries cannot: -- Analyzed interactively with [Metrics Explorer](https://docs.microsoft.com/azure/azure-monitor/essentials/metrics-getting-started). -- Used to receive notifications with an alert when a value crosses a threshold. -- Visualized in a workbook or dashboard. +- Return more than 500,000 rows. +- Return more than 64,000,000 bytes (~61 MiB total data). +- Run longer than 10 minutes by default. See this for details. 
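If the per-user rate limit above is exceeded, the service responds with HTTP 429 and a `Retry-After` header. The sketch below shows one way to honor that header; it assumes a `LogsQueryClient` named `client` and pre-defined `workspace_id` and `query` values, and retries only once for brevity.

```python
import time
from azure.core.exceptions import HttpResponseError

try:
    response = client.query(workspace_id, query, timespan=None)
except HttpResponseError as err:
    if err.status_code == 429:
        # Retry-After indicates how many seconds to wait before retrying the query.
        delay = int(err.response.headers.get("Retry-After", 30))
        time.sleep(delay)
        response = client.query(workspace_id, query, timespan=None)
    else:
        raise
```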
-#### Metrics data structure +### Metrics data structure Each set of metric values is a time series with the following characteristics: @@ -99,19 +90,20 @@ Each set of metric values is a time series with the following characteristics: ## Examples -- [Single logs query](#single-logs-query) +- [Logs query](#logs-query) - [Specify timespan](#specify-timespan) - - [Set logs query timeout](#set-logs-query-timeout) + - [Handle logs query response](#handle-logs-query-response) - [Batch logs query](#batch-logs-query) -- [Query metrics](#query-metrics) -- [Handle metrics response](#handle-metrics-response) - - [Example of handling response](#example-of-handling-response) -- [Advanced scenarios](#advanced-scenarios) +- [Advanced logs query scenarios](#advanced-logs-query-scenarios) + - [Set logs query timeout](#set-logs-query-timeout) - [Query multiple workspaces](#query-multiple-workspaces) +- [Metrics query](#metrics-query) + - [Handle metrics query response](#handle-metrics-query-response) + - [Example of handling response](#example-of-handling-response) -### Single logs query +### Logs query -This example shows getting a log query. To handle the response and view it in a tabular form, the [pandas](https://pypi.org/project/pandas/) library is used. See the [samples][python-query-samples] if you choose not to use pandas. +This example shows getting a logs query. To handle the response and view it in a tabular form, the [pandas](https://pypi.org/project/pandas/) library is used. See the [samples][samples] if you choose not to use pandas. #### Specify timespan @@ -150,29 +142,45 @@ for table in response.tables: print(df) ``` -#### Set logs query timeout +#### Handle logs query response -The following example shows setting a server timeout in seconds. A gateway timeout is raised if the query takes more time than the mentioned timeout. The default is 180 seconds and can be set up to 10 minutes (600 seconds). +The `query` API returns the `LogsQueryResult` while the `batch_query` API returns list of `LogsQueryResult`. Here's a hierarchy of the response: + +``` +LogsQueryResult +|---statistics +|---visualization +|---error +|---tables (list of `LogsTable` objects) + |---name + |---rows + |---columns (list of `LogsTableColumn` objects) + |---name + |---type +``` + +For example, to handle a logs query response with tables and display it using pandas: ```python -import os -import pandas as pd -from azure.monitor.query import LogsQueryClient -from azure.identity import DefaultAzureCredential +table = response.tables[0] +df = pd.DataFrame(table.rows, columns=[col.name for col in table.columns]) +``` -credential = DefaultAzureCredential() -client = LogsQueryClient(credential) +A full sample can be found [here](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-query/samples/sample_log_query_client.py). -response = client.query( - os.environ['LOG_WORKSPACE_ID'], - "range x from 1 to 10000000000 step 1 | count", - server_timeout=1, - ) +In a similar fashion, to handle a batch logs query response: + +```python +for result in response: + table = result.tables[0] + df = pd.DataFrame(table.rows, columns=[col.name for col in table.columns]) ``` +A full sample can be found [here](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-query/samples/sample_batch_query.py). + ### Batch logs query -The following example demonstrates sending multiple queries at the same time using batch query API. 
The queries can either be represented as a list of `LogQueryRequest` objects or a dictionary. This example uses the former approach. +The following example demonstrates sending multiple queries at the same time using batch query API. The queries can either be represented as a list of `LogsBatchQuery` objects or a dictionary. This example uses the former approach. ```python import os @@ -196,7 +204,7 @@ requests = [ timespan=(datetime(2021, 6, 2), timedelta(hours=1)), workspace_id=os.environ['LOG_WORKSPACE_ID'] ), - LogsBatchQueryRequest( + LogsBatchQuery( query= "AppRequests | take 2", workspace_id=os.environ['LOG_WORKSPACE_ID'] ), @@ -213,43 +221,51 @@ for rsp in response: print(df) ``` -#### Handling the response for Logs Query +### Advanced logs query scenarios -The `query` API returns the `LogsQueryResult` while the `batch_query` API returns list of `LogsQueryResult`. +#### Set logs query timeout -Here is a heirarchy of the response: +The following example shows setting a server timeout in seconds. A gateway timeout is raised if the query takes more time than the mentioned timeout. The default is 180 seconds and can be set up to 10 minutes (600 seconds). -``` -LogsQueryResult -|---statistics -|---visualization -|---error -|---tables (list of `LogsTable` objects) - |---name - |---rows - |---columns (list of `LogsTableColumn` objects) - |---name - |---type -``` +```python +import os +import pandas as pd +from azure.monitor.query import LogsQueryClient +from azure.identity import DefaultAzureCredential -So, to handle a response with tables and display it using pandas, +credential = DefaultAzureCredential() +client = LogsQueryClient(credential) -```python -table = response.tables[0] -df = pd.DataFrame(table.rows, columns=[col.name for col in table.columns]) +response = client.query( + os.environ['LOG_WORKSPACE_ID'], + "range x from 1 to 10000000000 step 1 | count", + server_timeout=1, + ) ``` -A full sample can be found [here](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-query/samples/sample_log_query_client.py). -In a very similar fashion, to handle a batch response, +#### Query multiple workspaces + +The same logs query can be executed across multiple Log Analytics workspaces. In addition to the Kusto query, the following parameters are required: + +- `workspace_id` - The first (primary) workspace ID. +- `additional_workspaces` - A list of workspaces, excluding the workspace provided in the `workspace_id` parameter. The parameter's list items may consist of the following identifier formats: + - Qualified workspace names + - Workspace IDs + - Azure resource IDs + +For example, the following query executes in three workspaces: ```python -for result in response: - table = result.tables[0] - df = pd.DataFrame(table.rows, columns=[col.name for col in table.columns]) +client.query( + , + query, + additional_workspaces=['', ''] + ) ``` -A full sample can be found [here](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-query/samples/sample_batch_query.py). -### Query metrics +A full sample can be found [here](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-query/samples/sample_log_query_multiple_workspaces.py). + +### Metrics query The following example gets metrics for an Event Grid subscription. The resource URI is that of an event grid topic. 
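The body of the Event Grid example is not shown in the hunk below, which only includes its final loop. As an illustration, a sketch consistent with that loop and with this preview of the library might look like the following; the `METRICS_RESOURCE_URI` environment variable and the metric name are placeholders, and the exact `query` parameters may differ between preview versions.

```python
import os
from datetime import timedelta
from azure.identity import DefaultAzureCredential
from azure.monitor.query import MetricsQueryClient

client = MetricsQueryClient(DefaultAzureCredential())

# Resource URI of an Event Grid topic (placeholder environment variable).
response = client.query(
    os.environ["METRICS_RESOURCE_URI"],
    metric_names=["PublishSuccessCount"],
    timespan=timedelta(days=1),
)

for metric in response.metrics:
    print(metric.name)
    for time_series_element in metric.timeseries:
        for metric_value in time_series_element.data:
            print(metric_value.time_stamp)
```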
@@ -285,7 +301,7 @@ for metric in response.metrics: print(metric_value.time_stamp) ``` -### Handle metrics response +#### Handle metrics query response The metrics query API returns a `MetricsResult` object. The `MetricsResult` object contains properties such as a list of `Metric`-typed objects, `granularity`, `namespace`, and `timespan`. The `Metric` objects list can be accessed using the `metrics` param. Each `Metric` object in this list contains a list of `TimeSeriesElement` objects. Each `TimeSeriesElement` contains `data` and `metadata_values` properties. In visual form, the object hierarchy of the response resembles the following structure: @@ -337,30 +353,6 @@ for metric in response.metrics: ) ``` -### Advanced scenarios - -#### Query multiple workspaces - -The same log query can be executed across multiple Log Analytics workspaces. In addition to the KQL query, the following parameters are required: - -- `workspace_id` - The first (primary) workspace ID. -- `additional_workspaces` - A list of workspaces, excluding the workspace provided in the `workspace_id` parameter. The parameter's list items may consist of the following identifier formats: - - Qualified workspace names - - Workspace IDs - - Azure resource IDs - -For example, the following query executes in three workspaces: - -```python -client.query( - , - query, - additional_workspaces=['', ''] - ) -``` - -A full sample can be found [here](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-query/samples/sample_log_query_multiple_workspaces.py). - ## Troubleshooting Enable the `azure.monitor.query` logger to collect traces from the library. @@ -379,9 +371,7 @@ Optional keyword arguments can be passed in at the client and per-operation leve ## Next steps -### Additional documentation - -For more extensive documentation, see the [Azure Monitor Query documentation][python-query-product-docs]. +To learn more about Azure Monitor, see the [Azure Monitor service documentation][azure_monitor_overview]. 
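Expanding on the Troubleshooting note above: assuming the shared azure-core logging convention applies here, as it does across the SDK, detailed HTTP logging is opted into with the `logging_enable` keyword. A minimal sketch:

```python
import logging
import sys

from azure.identity import DefaultAzureCredential
from azure.monitor.query import LogsQueryClient

# Route the library's logger to stdout at DEBUG level.
logger = logging.getLogger("azure.monitor.query")
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler(stream=sys.stdout))

# logging_enable can be set on the client (all operations) or passed per operation.
client = LogsQueryClient(DefaultAzureCredential(), logging_enable=True)
```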
## Contributing @@ -393,19 +383,19 @@ This project has adopted the [Microsoft Open Source Code of Conduct][code_of_con -[azure_cli_link]: https://pypi.org/project/azure-cli/ -[python-query-src]: https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-query/ -[python-query-pypi]: https://aka.ms/azsdk-python-monitor-query-pypi -[python-query-product-docs]: https://docs.microsoft.com/azure/azure-monitor/ -[python-query-ref-docs]: https://docs.microsoft.com/python/api/overview/azure/monitor-query-readme?view=azure-python-preview -[python-query-samples]: https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/monitor/azure-monitor-query/samples -[python-query-changelog]: https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/monitor/azure-monitor-query/CHANGELOG.md -[pip]: https://pypi.org/project/pip/ - [azure_core_exceptions]: https://aka.ms/azsdk/python/core/docs#module-azure.core.exceptions -[python_logging]: https://docs.python.org/3/library/logging.html [azure_core_ref_docs]: https://aka.ms/azsdk/python/core/docs +[azure_monitor_create_using_portal]: https://docs.microsoft.com/azure/azure-monitor/logs/quick-create-workspace +[azure_monitor_overview]: https://docs.microsoft.com/azure/azure-monitor/ [azure_subscription]: https://azure.microsoft.com/free/python/ +[changelog]: https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/monitor/azure-monitor-query/CHANGELOG.md +[kusto_query_language]: https://docs.microsoft.com/azure/data-explorer/kusto/query/ +[package]: https://aka.ms/azsdk-python-monitor-query-pypi +[pip]: https://pypi.org/project/pip/ +[python_logging]: https://docs.python.org/3/library/logging.html +[python-query-ref-docs]: https://docs.microsoft.com/python/api/overview/azure/monitor-query-readme?view=azure-python-preview +[samples]: https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/monitor/azure-monitor-query/samples +[source]: https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-query/ [cla]: https://cla.microsoft.com [code_of_conduct]: https://opensource.microsoft.com/codeofconduct/ diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/__init__.py index 9043ee10d44c..1171a0562dfc 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/__init__.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/__init__.py @@ -7,10 +7,16 @@ from ._logs_query_client import LogsQueryClient from ._metrics_query_client import MetricsQueryClient +from ._exceptions import ( + LogsQueryError, + QueryPartialErrorException +) + from ._models import ( MetricAggregationType, LogsQueryResult, LogsTable, + LogsTableRow, MetricsResult, LogsBatchQuery, MetricNamespace, @@ -30,7 +36,10 @@ "MetricAggregationType", "LogsQueryClient", "LogsQueryResult", + "LogsQueryError", + "QueryPartialErrorException", "LogsTable", + "LogsTableRow", "LogsBatchQuery", "MetricsQueryClient", "MetricNamespace", diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_exceptions.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_exceptions.py new file mode 100644 index 000000000000..f849f93ff6ee --- /dev/null +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_exceptions.py @@ -0,0 +1,79 @@ +# +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +from azure.core.exceptions import HttpResponseError + +class LogsQueryError(object): + """The code and message for an error. + + All required parameters must be populated in order to send to Azure. + + :ivar code: A machine readable error code. + :vartype code: str + :ivar message: A human readable error message. + :vartype message: str + :ivar details: error details. + :vartype details: list[~monitor_query_client.models.ErrorDetail] + :ivar innererror: Inner error details if they exist. + :vartype innererror: ~azure.monitor.query.LogsQueryError + :ivar additional_properties: Additional properties that can be provided on the error info + object. + :vartype additional_properties: object + :ivar bool is_error: Boolean check for error item when iterating over list of + results. Always True for an instance of a LogsQueryError. + """ + def __init__( + self, + **kwargs + ): + self.code = kwargs.get('code', None) + self.message = kwargs.get('message', None) + self.details = kwargs.get('details', None) + self.innererror = kwargs.get('innererror', None) + self.additional_properties = kwargs.get('additional_properties', None) + self.is_error = True + + @classmethod + def _from_generated(cls, generated): + if not generated: + return None + details = None + if generated.details is not None: + details = [d.serialize() for d in generated.details] + return cls( + code=generated.code, + message=generated.message, + innererror=cls._from_generated(generated.innererror) if generated.innererror else None, + additional_properties=generated.additional_properties, + details=details, + ) + +class QueryPartialErrorException(HttpResponseError): + """There is a partial failure in query operation. This is thrown for a single query operation + when allow_partial_errors is set to False. + + :ivar code: A machine readable error code. + :vartype code: str + :ivar message: A human readable error message. + :vartype message: str + :ivar details: error details. + :vartype details: list[~monitor_query_client.models.ErrorDetail] + :ivar innererror: Inner error details if they exist. + :vartype innererror: ~azure.monitor.query.LogsQueryError + :ivar additional_properties: Additional properties that can be provided on the error info + object. 
+ :vartype additional_properties: object + """ + + def __init__(self, **kwargs): + error = kwargs.pop('error', None) + if error: + self.code = error.code + self.message = error.message + self.details = [d.serialize() for d in error.details] if error.details else None + self.innererror = LogsQueryError._from_generated(error.innererror) if error.innererror else None + self.additional_properties = error.additional_properties + super(QueryPartialErrorException, self).__init__(message=self.message) diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_helpers.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_helpers.py index 8b3194f3dc14..5adf5bc095d9 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_helpers.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_helpers.py @@ -41,13 +41,21 @@ def get_metrics_authentication_policy( raise TypeError("Unsupported credential") -def process_error(exception): - raise_error = HttpResponseError - raise raise_error(message=exception.message, response=exception.response) - -def order_results(request_order, mapping, obj): +def order_results(request_order, mapping, obj, err, allow_partial_errors=False): ordered = [mapping[id] for id in request_order] - return [obj._from_generated(rsp) for rsp in ordered] # pylint: disable=protected-access + results = [] + for item in ordered: + if not item.body.error: + results.append(obj._from_generated(item.body)) # pylint: disable=protected-access + else: + error = item.body.error + if allow_partial_errors and error.code == 'PartialError': + res = obj._from_generated(item.body) # pylint: disable=protected-access + res.partial_error = err._from_generated(error) # pylint: disable=protected-access + results.append(res) + else: + results.append(err._from_generated(error)) # pylint: disable=protected-access + return results def construct_iso8601(timespan=None): if not timespan: @@ -90,3 +98,23 @@ def native_col_type(col_type, value): def process_row(col_types, row): return [native_col_type(col_types[ind], val) for ind, val in enumerate(row)] + +def process_error(error, model): + try: + model = model._from_generated(error.model.error) # pylint: disable=protected-access + except AttributeError: # model can be none + pass + raise HttpResponseError( + message=error.message, + response=error.response, + model=model) + +def process_prefer(server_timeout, include_statistics, include_visualization): + prefer = "" + if server_timeout: + prefer += "wait=" + str(server_timeout) + "," + if include_statistics: + prefer += "include-statistics=true," + if include_visualization: + prefer += "include-render=true" + return prefer.rstrip(",") diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_logs_query_client.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_logs_query_client.py index b555985d44aa..ccb68fdd6414 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_logs_query_client.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_logs_query_client.py @@ -12,8 +12,9 @@ from ._generated._monitor_query_client import MonitorQueryClient from ._generated.models import BatchRequest, QueryBody as LogsQueryBody -from ._helpers import get_authentication_policy, process_error, construct_iso8601, order_results +from ._helpers import get_authentication_policy, construct_iso8601, order_results, process_error, process_prefer from ._models import LogsBatchQuery, LogsQueryResult +from ._exceptions import LogsQueryError, QueryPartialErrorException if TYPE_CHECKING: from 
azure.core.credentials import TokenCredential @@ -76,6 +77,8 @@ def query(self, workspace_id, query, **kwargs): :keyword additional_workspaces: A list of workspaces that are included in the query. These can be qualified workspace names, workspace Ids, or Azure resource Ids. :paramtype additional_workspaces: list[str] + :keyword allow_partial_errors: Defaults to False. If set to true, partial errors are not thrown. + :paramtype allow_partial_errors: bool :return: LogsQueryResult, or the result of cls(response) :rtype: ~azure.monitor.query.LogsQueryResult :raises: ~azure.core.exceptions.HttpResponseError @@ -89,6 +92,7 @@ def query(self, workspace_id, query, **kwargs): :dedent: 0 :caption: Get a response for a single Log Query """ + allow_partial_errors = kwargs.pop('allow_partial_errors', False) if 'timespan' not in kwargs: raise TypeError("query() missing 1 required keyword-only argument: 'timespan'") timespan = construct_iso8601(kwargs.pop('timespan')) @@ -97,17 +101,7 @@ def query(self, workspace_id, query, **kwargs): server_timeout = kwargs.pop("server_timeout", None) workspaces = kwargs.pop("additional_workspaces", None) - prefer = "" - if server_timeout: - prefer += "wait=" + str(server_timeout) - if include_statistics: - if len(prefer) > 0: - prefer += "," - prefer += "include-statistics=true" - if include_visualization: - if len(prefer) > 0: - prefer += "," - prefer += "include-render=true" + prefer = process_prefer(server_timeout, include_statistics, include_visualization) body = LogsQueryBody( query=query, @@ -117,14 +111,23 @@ def query(self, workspace_id, query, **kwargs): ) try: - return LogsQueryResult._from_generated(self._query_op.execute( # pylint: disable=protected-access + generated_response = self._query_op.execute( # pylint: disable=protected-access workspace_id=workspace_id, body=body, prefer=prefer, **kwargs - )) - except HttpResponseError as e: - process_error(e) + ) + except HttpResponseError as err: + process_error(err, LogsQueryError) + response = LogsQueryResult._from_generated(generated_response) # pylint: disable=protected-access + if not generated_response.error: + return response + if not allow_partial_errors: + raise QueryPartialErrorException(error=generated_response.error) + response.partial_error = LogsQueryError._from_generated( # pylint: disable=protected-access + generated_response.error + ) + return response @distributed_trace def query_batch(self, queries, **kwargs): @@ -136,6 +139,9 @@ def query_batch(self, queries, **kwargs): :param queries: The list of Kusto queries to execute. :type queries: list[dict] or list[~azure.monitor.query.LogsBatchQuery] + :keyword bool allow_partial_errors: If set to True, a `LogsQueryResult` object is returned + when a partial error occurs. The error can be accessed using the `partial_error` + attribute in the object. :return: List of LogsQueryResult, or the result of cls(response) :rtype: list[~azure.monitor.query.LogsQueryResult] :raises: ~azure.core.exceptions.HttpResponseError @@ -149,6 +155,7 @@ def query_batch(self, queries, **kwargs): :dedent: 0 :caption: Get a response for multiple Log Queries. 
""" + allow_partial_errors = kwargs.pop('allow_partial_errors', False) try: queries = [LogsBatchQuery(**q) for q in queries] except (KeyError, TypeError): @@ -161,7 +168,12 @@ def query_batch(self, queries, **kwargs): batch = BatchRequest(requests=queries) generated = self._query_op.batch(batch, **kwargs) mapping = {item.id: item for item in generated.responses} - return order_results(request_order, mapping, LogsQueryResult) + return order_results( + request_order, + mapping, + LogsQueryResult, + LogsQueryError, + allow_partial_errors) def close(self): # type: () -> None diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_metrics_query_client.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_metrics_query_client.py index 30a99f6f82a0..78f168bb59de 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_metrics_query_client.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_metrics_query_client.py @@ -148,8 +148,8 @@ def list_metric_namespaces(self, resource_uri, **kwargs): **kwargs) @distributed_trace - def list_metric_definitions(self, resource_uri, metric_namespace=None, **kwargs): - # type: (str, str, Any) -> ItemPaged[MetricDefinition] + def list_metric_definitions(self, resource_uri, **kwargs): + # type: (str, Any) -> ItemPaged[MetricDefinition] """Lists the metric definitions for the resource. :param resource_uri: The identifier of the resource. diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_models.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_models.py index 84e0a05f4cd1..daa4977803fc 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_models.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_models.py @@ -28,7 +28,7 @@ class LogsTable(object): :ivar column_types: The types of columns in this table. :vartype columns: list[object] :ivar rows: Required. The resulting rows from this query. - :vartype rows: list[list[object]] + :vartype rows: list[~azure.monitor.query.LogsTableRow] """ def __init__(self, **kwargs): # type: (Any) -> None @@ -36,7 +36,14 @@ def __init__(self, **kwargs): self.columns = kwargs.pop('columns', None) # type: Optional[str] self.columns_types = kwargs.pop('column_types', None) # type: Optional[Any] _rows = kwargs.pop('rows', None) - self.rows = [process_row(self.columns_types, row) for row in _rows] + self.rows = [ + LogsTableRow( + row=row, + row_index=ind, + col_types=self.columns_types, + columns=self.columns + ) for ind, row in enumerate(_rows) + ] @classmethod def _from_generated(cls, generated): @@ -48,6 +55,40 @@ def _from_generated(cls, generated): ) +class LogsTableRow(object): + """Represents a single row in logs table. + + ivar list row: The collection of values in the row. + ivar int row_index: The index of the row in the table + """ + def __init__(self, **kwargs): + # type: (Any) -> None + _col_types = kwargs['col_types'] + row = kwargs['row'] + self.row = process_row(_col_types, row) + self.row_index = kwargs['row_index'] + _columns = kwargs['columns'] + self._row_dict = { + _columns[i]: self.row[i] for i in range(len(self.row)) + } + + def __iter__(self): + """This will iterate over the row directly. + """ + return iter(self.row) + + def __getitem__(self, column): + """This type must be subscriptable directly to row. + Must be gettableby both column name and row index + Example: row[0] -> returns the first element of row and + row[column_name] -> returns the row element against the given column name. 
+ """ + try: + return self._row_dict[column] + except KeyError: + return self.row[column] + + class MetricsResult(object): """The response to a metrics query. @@ -165,21 +206,27 @@ class LogsQueryResult(object): :ivar visualization: This will include a visualization property in the response that specifies the type of visualization selected by the query and any properties for that visualization. :vartype visualization: object - :ivar error: Any error info. - :vartype error: ~azure.core.exceptions.HttpResponseError + :ivar partial_error: Any error info. This is none except in the case where `allow_partial_errors` + is explicitly set to True. + :vartype partial_error: ~azure.core.exceptions.HttpResponseError + :ivar bool is_error: Boolean check for error item when iterating over list of + results. Always False for an instance of a LogsQueryResult. """ def __init__( self, **kwargs ): self.tables = kwargs.get('tables', None) - self.error = kwargs.get('error', None) + self.partial_error = None self.statistics = kwargs.get('statistics', None) self.visualization = kwargs.get('visualization', None) + self.is_error = False + + def __iter__(self): + return iter(self.tables) @classmethod def _from_generated(cls, generated): - if not generated: return cls() tables = None @@ -195,7 +242,6 @@ def _from_generated(cls, generated): tables=tables, statistics=generated.statistics, visualization=generated.render, - error=generated.error ) diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_version.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_version.py index 3938d6c3e835..9ed953daa8fe 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_version.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_version.py @@ -5,4 +5,4 @@ # license information. 
# -------------------------------------------------------------------------- -VERSION = "1.0.0b4" +VERSION = "1.0.0b5" diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_logs_query_client_async.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_logs_query_client_async.py index 090fcceb9ed8..e31806a110fa 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_logs_query_client_async.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_logs_query_client_async.py @@ -7,15 +7,16 @@ from datetime import datetime, timedelta from typing import Any, Tuple, Union, Sequence, Dict, List, TYPE_CHECKING -from azure.core.exceptions import HttpResponseError from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.exceptions import HttpResponseError from .._generated.aio._monitor_query_client import MonitorQueryClient from .._generated.models import BatchRequest, QueryBody as LogsQueryBody -from .._helpers import process_error, construct_iso8601, order_results +from .._helpers import construct_iso8601, order_results, process_error, process_prefer from .._models import LogsQueryResult, LogsBatchQuery from ._helpers_asyc import get_authentication_policy +from .._exceptions import LogsQueryError, QueryPartialErrorException if TYPE_CHECKING: from azure.core.credentials_async import AsyncTokenCredential @@ -75,23 +76,14 @@ async def query( :rtype: ~azure.monitor.query.LogsQueryResult :raises: ~azure.core.exceptions.HttpResponseError """ + allow_partial_errors = kwargs.pop('allow_partial_errors', False) timespan = construct_iso8601(timespan) include_statistics = kwargs.pop("include_statistics", False) include_visualization = kwargs.pop("include_visualization", False) server_timeout = kwargs.pop("server_timeout", None) additional_workspaces = kwargs.pop("additional_workspaces", None) - prefer = "" - if server_timeout: - prefer += "wait=" + str(server_timeout) - if include_statistics: - if len(prefer) > 0: - prefer += "," - prefer += "include-statistics=true" - if include_visualization: - if len(prefer) > 0: - prefer += "," - prefer += "include-render=true" + prefer = process_prefer(server_timeout, include_statistics, include_visualization) body = LogsQueryBody( query=query, @@ -101,14 +93,23 @@ async def query( ) try: - return LogsQueryResult._from_generated(await self._query_op.execute( # pylint: disable=protected-access + generated_response = await self._query_op.execute( # pylint: disable=protected-access workspace_id=workspace_id, body=body, prefer=prefer, **kwargs - )) - except HttpResponseError as e: - process_error(e) + ) + except HttpResponseError as err: + process_error(err, LogsQueryError) + response = LogsQueryResult._from_generated(generated_response) # pylint: disable=protected-access + if not generated_response.error: + return response + if not allow_partial_errors: + raise QueryPartialErrorException(error=generated_response.error) + response.partial_error = LogsQueryError._from_generated( # pylint: disable=protected-access + generated_response.error + ) + return response @distributed_trace_async async def query_batch( @@ -123,10 +124,14 @@ async def query_batch( :param queries: The list of Kusto queries to execute. :type queries: list[dict] or list[~azure.monitor.query.LogsBatchQuery] + :keyword bool allow_partial_errors: If set to True, a `LogsQueryResult` object is returned + when a partial error occurs. The error can be accessed using the `partial_error` + attribute in the object. 
:return: list of LogsQueryResult objects, or the result of cls(response) :rtype: list[~azure.monitor.query.LogsQueryResult] :raises: ~azure.core.exceptions.HttpResponseError """ + allow_partial_errors = kwargs.pop('allow_partial_errors', False) try: queries = [LogsBatchQuery(**q) for q in queries] except (KeyError, TypeError): @@ -139,7 +144,12 @@ async def query_batch( batch = BatchRequest(requests=queries) generated = await self._query_op.batch(batch, **kwargs) mapping = {item.id: item for item in generated.responses} - return order_results(request_order, mapping, LogsQueryResult) + return order_results( + request_order, + mapping, + LogsQueryResult, + LogsQueryError, + allow_partial_errors) async def __aenter__(self) -> "LogsQueryClient": await self._client.__aenter__() diff --git a/sdk/monitor/azure-monitor-query/samples/sample_batch_query.py b/sdk/monitor/azure-monitor-query/samples/sample_batch_query.py index c0838643dce4..43444c369c3c 100644 --- a/sdk/monitor/azure-monitor-query/samples/sample_batch_query.py +++ b/sdk/monitor/azure-monitor-query/samples/sample_batch_query.py @@ -20,27 +20,30 @@ workspace_id= os.environ['LOG_WORKSPACE_ID'] ), LogsBatchQuery( - query= """AppRequests | take 10 | - summarize avgRequestDuration=avg(DurationMs) by bin(TimeGenerated, 10m), _ResourceId""", - timespan=(datetime(2021, 6, 2), timedelta(hours=1)), + query= """AppRequestsss | take 10""", + timespan=(datetime(2021, 6, 2), timedelta(days=1)), workspace_id= os.environ['LOG_WORKSPACE_ID'] ), LogsBatchQuery( - query= "AppRequests | take 5", + query= """let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""", workspace_id= os.environ['LOG_WORKSPACE_ID'], timespan=(datetime(2021, 6, 2), datetime(2021, 6, 3)), include_statistics=True ), ] -responses = client.query_batch(requests) +responses = client.query_batch(requests, allow_partial_errors=False) for response in responses: - try: + if not response.is_error: table = response.tables[0] df = pd.DataFrame(table.rows, columns=table.columns) print(df) print("\n\n-------------------------\n\n") - except TypeError: - print(response.error.innererror) + else: + error = response + print(error.message) + # [END send_query_batch] \ No newline at end of file diff --git a/sdk/monitor/azure-monitor-query/samples/sample_log_query_client.py b/sdk/monitor/azure-monitor-query/samples/sample_log_query_client.py index 917f6a16e698..d16d38513f65 100644 --- a/sdk/monitor/azure-monitor-query/samples/sample_log_query_client.py +++ b/sdk/monitor/azure-monitor-query/samples/sample_log_query_client.py @@ -4,7 +4,8 @@ import os import pandas as pd from datetime import timedelta -from azure.monitor.query import LogsQueryClient +from azure.monitor.query import LogsQueryClient, QueryPartialErrorException +from azure.core.exceptions import HttpResponseError from azure.identity import DefaultAzureCredential # [START client_auth_with_token_cred] @@ -16,21 +17,21 @@ # Response time trend # request duration over the last 12 hours. 
# [START send_logs_query] -query = """AppRequests | -summarize avgRequestDuration=avg(DurationMs) by bin(TimeGenerated, 10m), _ResourceId""" +query = """AppRequests | take 5""" # returns LogsQueryResult -response = client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=timedelta(days=1)) - -if not response.tables: - print("No results for the query") - -for table in response.tables: - try: - df = pd.DataFrame(table.rows, columns=table.columns) +try: + response = client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=timedelta(days=1)) + for table in response: + df = pd.DataFrame(data=table.rows, columns=table.columns) print(df) - except TypeError: - print(response.error) +except QueryPartialErrorException as err: + print("this is a partial error") + print(err.details) +except HttpResponseError as err: + print("something fatal happened") + print (err) + # [END send_logs_query] """ TimeGenerated _ResourceId avgRequestDuration diff --git a/sdk/monitor/azure-monitor-query/tests/async/test_exceptions_async.py b/sdk/monitor/azure-monitor-query/tests/async/test_exceptions_async.py new file mode 100644 index 000000000000..cf46756a6ddf --- /dev/null +++ b/sdk/monitor/azure-monitor-query/tests/async/test_exceptions_async.py @@ -0,0 +1,171 @@ +from datetime import timedelta, datetime +import pytest +import os +from azure.identity.aio import ClientSecretCredential +from azure.core.exceptions import HttpResponseError +from azure.monitor.query import LogsBatchQuery, LogsQueryError,LogsQueryResult, QueryPartialErrorException +from azure.monitor.query.aio import LogsQueryClient + +def _credential(): + credential = ClientSecretCredential( + client_id = os.environ['AZURE_CLIENT_ID'], + client_secret = os.environ['AZURE_CLIENT_SECRET'], + tenant_id = os.environ['AZURE_TENANT_ID'] + ) + return credential + +@pytest.mark.live_test_only +@pytest.mark.asyncio +async def test_logs_single_query_fatal_exception(): + credential = _credential() + client = LogsQueryClient(credential) + with pytest.raises(HttpResponseError): + await client.query('bad_workspace_id', 'AppRequests', timespan=None) + +@pytest.mark.live_test_only +@pytest.mark.asyncio +async def test_logs_single_query_partial_exception_not_allowed(): + credential = _credential() + client = LogsQueryClient(credential) + query = """let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""" + with pytest.raises(QueryPartialErrorException) as err: + await client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=timedelta(days=1)) + +@pytest.mark.live_test_only +@pytest.mark.asyncio +async def test_logs_single_query_partial_exception_allowed(): + credential = _credential() + client = LogsQueryClient(credential) + query = """let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""" + response = await client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=timedelta(days=1), allow_partial_errors=True) + assert response.partial_error is not None + assert response.partial_error.code == 'PartialError' + assert response.partial_error.__class__ == LogsQueryError + +@pytest.mark.live_test_only +@pytest.mark.asyncio +async def test_logs_batch_query_fatal_exception(): + credential = ClientSecretCredential( + client_id = os.environ['AZURE_CLIENT_ID'], + client_secret = 'bad_secret', + tenant_id = os.environ['AZURE_TENANT_ID'] + ) + client = LogsQueryClient(credential) + requests = [ + LogsBatchQuery( + query="AzureActivity | summarize count()", + 
timespan=timedelta(hours=1), + workspace_id= os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """AppRequestsss | take 10""", + timespan=(datetime(2021, 6, 2), timedelta(days=1)), + workspace_id= os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""", + workspace_id= os.environ['LOG_WORKSPACE_ID'], + timespan=(datetime(2021, 6, 2), datetime(2021, 6, 3)), + include_statistics=True + ), + ] + with pytest.raises(HttpResponseError): + await client.query_batch(requests, allow_partial_errors=True) + +@pytest.mark.live_test_only +@pytest.mark.asyncio +async def test_logs_batch_query_partial_exception_not_allowed(): + credential = _credential() + client = LogsQueryClient(credential) + requests = [ + LogsBatchQuery( + query="AzureActivity | summarize count()", + timespan=timedelta(hours=1), + workspace_id= os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """AppRequests | take 10""", + timespan=(datetime(2021, 6, 2), timedelta(days=1)), + workspace_id= os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""", + workspace_id= os.environ['LOG_WORKSPACE_ID'], + timespan=(datetime(2021, 6, 2), datetime(2021, 6, 3)), + include_statistics=True + ), + ] + responses = await client.query_batch(requests) + r1, r2, r3 = responses[0], responses[1], responses[2] + assert r1.__class__ == LogsQueryResult + assert r2.__class__ == LogsQueryResult + assert r3.__class__ == LogsQueryError + +@pytest.mark.live_test_only +@pytest.mark.asyncio +async def test_logs_batch_query_partial_exception_allowed(): + credential = _credential() + client = LogsQueryClient(credential) + requests = [ + LogsBatchQuery( + query="AzureActivity | summarize count()", + timespan=timedelta(hours=1), + workspace_id= os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """AppRequests | take 10""", + timespan=(datetime(2021, 6, 2), timedelta(days=1)), + workspace_id= os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""", + workspace_id= os.environ['LOG_WORKSPACE_ID'], + timespan=(datetime(2021, 6, 2), datetime(2021, 6, 3)), + include_statistics=True + ), + ] + responses = await client.query_batch(requests, allow_partial_errors=True) + r1, r2, r3 = responses[0], responses[1], responses[2] + assert r1.__class__ == LogsQueryResult + assert r2.__class__ == LogsQueryResult + assert r3.__class__ == LogsQueryResult + assert r3.partial_error is not None + +@pytest.mark.live_test_only +@pytest.mark.asyncio +async def test_logs_batch_query_non_fatal_exception(): + credential = _credential() + client = LogsQueryClient(credential) + requests = [ + LogsBatchQuery( + query="AzureActivity | summarize count()", + timespan=timedelta(hours=1), + workspace_id= os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """AppRequests | take 10""", + timespan=(datetime(2021, 6, 2), timedelta(days=1)), + workspace_id= os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """Bad Query""", + workspace_id= os.environ['LOG_WORKSPACE_ID'], + timespan=(datetime(2021, 6, 2), datetime(2021, 6, 3)), + include_statistics=True + ), + ] + responses = await client.query_batch(requests) + r1, r2, r3 = responses[0], responses[1], responses[2] + assert r1.__class__ == 
LogsQueryResult + assert r2.__class__ == LogsQueryResult + assert r3.__class__ == LogsQueryError diff --git a/sdk/monitor/azure-monitor-query/tests/async/test_logs_client_async.py b/sdk/monitor/azure-monitor-query/tests/async/test_logs_client_async.py index bf7ade3b9ad2..1894d2de3d84 100644 --- a/sdk/monitor/azure-monitor-query/tests/async/test_logs_client_async.py +++ b/sdk/monitor/azure-monitor-query/tests/async/test_logs_client_async.py @@ -4,7 +4,7 @@ import os from azure.identity.aio import ClientSecretCredential from azure.core.exceptions import HttpResponseError -from azure.monitor.query import LogsBatchQuery +from azure.monitor.query import LogsBatchQuery, LogsQueryError, LogsTable, LogsQueryResult, LogsTableRow from azure.monitor.query.aio import LogsQueryClient def _credential(): @@ -100,7 +100,7 @@ async def test_logs_query_batch_default(): assert r1.tables[0].columns[1] == '_ResourceId' assert r1.tables[0].columns[2] == 'avgRequestDuration' r2 = response[2] - assert r2.error is not None + assert r2.__class__ == LogsQueryError @pytest.mark.skip('https://github.com/Azure/azure-sdk-for-python/issues/19382') @pytest.mark.live_test_only @@ -178,3 +178,47 @@ async def test_logs_single_query_with_render_and_stats(): assert response.visualization is not None assert response.statistics is not None + +@pytest.mark.live_test_only +@pytest.mark.asyncio +async def test_logs_query_result_iterate_over_tables(): + client = LogsQueryClient(_credential()) + + query = "AppRequests; AppRequests | take 5" + + response = await client.query( + os.environ['LOG_WORKSPACE_ID'], + query, + timespan=None, + include_statistics=True, + include_visualization=True + ) + + ## should iterate over tables + for item in response: + assert item.__class__ == LogsTable + + assert response.statistics is not None + assert response.visualization is not None + assert len(response.tables) == 2 + assert response.__class__ == LogsQueryResult + +@pytest.mark.live_test_only +@pytest.mark.asyncio +async def test_logs_query_result_row_type(): + client = LogsQueryClient(_credential()) + + query = "AppRequests | take 5" + + response = await client.query( + os.environ['LOG_WORKSPACE_ID'], + query, + timespan=None, + ) + + ## should iterate over tables + for table in response: + assert table.__class__ == LogsTable + + for row in table.rows: + assert row.__class__ == LogsTableRow diff --git a/sdk/monitor/azure-monitor-query/tests/test_exceptions.py b/sdk/monitor/azure-monitor-query/tests/test_exceptions.py new file mode 100644 index 000000000000..92d28cd75dd9 --- /dev/null +++ b/sdk/monitor/azure-monitor-query/tests/test_exceptions.py @@ -0,0 +1,163 @@ +from datetime import timedelta, datetime +import pytest +import os +from azure.identity import ClientSecretCredential +from azure.core.exceptions import HttpResponseError +from azure.monitor.query import LogsQueryClient, LogsBatchQuery, LogsQueryError,LogsQueryResult, QueryPartialErrorException + +def _credential(): + credential = ClientSecretCredential( + client_id = os.environ['AZURE_CLIENT_ID'], + client_secret = os.environ['AZURE_CLIENT_SECRET'], + tenant_id = os.environ['AZURE_TENANT_ID'] + ) + return credential + +@pytest.mark.live_test_only +def test_logs_single_query_fatal_exception(): + credential = _credential() + client = LogsQueryClient(credential) + with pytest.raises(HttpResponseError): + client.query('bad_workspace_id', 'AppRequests', timespan=None) + +@pytest.mark.live_test_only +def test_logs_single_query_partial_exception_not_allowed(): + credential = _credential() 
+ client = LogsQueryClient(credential) + query = """let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""" + with pytest.raises(QueryPartialErrorException) as err: + client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=timedelta(days=1)) + +@pytest.mark.live_test_only +def test_logs_single_query_partial_exception_allowed(): + credential = _credential() + client = LogsQueryClient(credential) + query = """let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""" + response = client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=timedelta(days=1), allow_partial_errors=True) + assert response.partial_error is not None + assert response.partial_error.code == 'PartialError' + assert response.partial_error.__class__ == LogsQueryError + +@pytest.mark.live_test_only +def test_logs_batch_query_fatal_exception(): + credential = ClientSecretCredential( + client_id = os.environ['AZURE_CLIENT_ID'], + client_secret = 'bad_secret', + tenant_id = os.environ['AZURE_TENANT_ID'] + ) + client = LogsQueryClient(credential) + requests = [ + LogsBatchQuery( + query="AzureActivity | summarize count()", + timespan=timedelta(hours=1), + workspace_id= os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """AppRequestsss | take 10""", + timespan=(datetime(2021, 6, 2), timedelta(days=1)), + workspace_id= os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""", + workspace_id= os.environ['LOG_WORKSPACE_ID'], + timespan=(datetime(2021, 6, 2), datetime(2021, 6, 3)), + include_statistics=True + ), + ] + with pytest.raises(HttpResponseError): + responses = client.query_batch(requests, allow_partial_errors=True) + +@pytest.mark.live_test_only +def test_logs_batch_query_partial_exception_not_allowed(): + credential = _credential() + client = LogsQueryClient(credential) + requests = [ + LogsBatchQuery( + query="AzureActivity | summarize count()", + timespan=timedelta(hours=1), + workspace_id= os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """AppRequests | take 10""", + timespan=(datetime(2021, 6, 2), timedelta(days=1)), + workspace_id= os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""", + workspace_id= os.environ['LOG_WORKSPACE_ID'], + timespan=(datetime(2021, 6, 2), datetime(2021, 6, 3)), + include_statistics=True + ), + ] + responses = client.query_batch(requests) + r1, r2, r3 = responses[0], responses[1], responses[2] + assert r1.__class__ == LogsQueryResult + assert r2.__class__ == LogsQueryResult + assert r3.__class__ == LogsQueryError + +@pytest.mark.live_test_only +def test_logs_batch_query_partial_exception_allowed(): + credential = _credential() + client = LogsQueryClient(credential) + requests = [ + LogsBatchQuery( + query="AzureActivity | summarize count()", + timespan=timedelta(hours=1), + workspace_id= os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """AppRequests | take 10""", + timespan=(datetime(2021, 6, 2), timedelta(days=1)), + workspace_id= os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""", + workspace_id= os.environ['LOG_WORKSPACE_ID'], + timespan=(datetime(2021, 6, 2), 
datetime(2021, 6, 3)), + include_statistics=True + ), + ] + responses = client.query_batch(requests, allow_partial_errors=True) + r1, r2, r3 = responses[0], responses[1], responses[2] + assert r1.__class__ == LogsQueryResult + assert r2.__class__ == LogsQueryResult + assert r3.__class__ == LogsQueryResult + assert r3.partial_error is not None + +@pytest.mark.live_test_only +def test_logs_batch_query_non_fatal_exception(): + credential = _credential() + client = LogsQueryClient(credential) + requests = [ + LogsBatchQuery( + query="AzureActivity | summarize count()", + timespan=timedelta(hours=1), + workspace_id= os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """AppRequests | take 10""", + timespan=(datetime(2021, 6, 2), timedelta(days=1)), + workspace_id= os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """Bad Query""", + workspace_id= os.environ['LOG_WORKSPACE_ID'], + timespan=(datetime(2021, 6, 2), datetime(2021, 6, 3)), + include_statistics=True + ), + ] + responses = client.query_batch(requests) + r1, r2, r3 = responses[0], responses[1], responses[2] + assert r1.__class__ == LogsQueryResult + assert r2.__class__ == LogsQueryResult + assert r3.__class__ == LogsQueryError diff --git a/sdk/monitor/azure-monitor-query/tests/test_logs_client.py b/sdk/monitor/azure-monitor-query/tests/test_logs_client.py index 248f44796cbe..a9036055eb0d 100644 --- a/sdk/monitor/azure-monitor-query/tests/test_logs_client.py +++ b/sdk/monitor/azure-monitor-query/tests/test_logs_client.py @@ -3,7 +3,7 @@ import os from azure.identity import ClientSecretCredential from azure.core.exceptions import HttpResponseError -from azure.monitor.query import LogsQueryClient, LogsBatchQuery +from azure.monitor.query import LogsQueryClient, LogsBatchQuery, LogsQueryError, LogsTable, LogsQueryResult, LogsTableRow def _credential(): credential = ClientSecretCredential( @@ -55,11 +55,12 @@ def test_logs_single_query_with_non_200(): def test_logs_single_query_with_partial_success(): credential = _credential() client = LogsQueryClient(credential) - query = "set truncationmaxrecords=1; union * | project TimeGenerated | take 10" + query = """let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""" + response = client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=None, allow_partial_errors=True) - response = client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=None) - - assert response is not None + assert response.partial_error is not None @pytest.mark.skip("https://github.com/Azure/azure-sdk-for-python/issues/19917") @pytest.mark.live_test_only @@ -108,7 +109,7 @@ def test_logs_query_batch_default(): assert r1.tables[0].columns[1] == '_ResourceId' assert r1.tables[0].columns[2] == 'avgRequestDuration' r2 = response[2] - assert r2.error is not None + assert r2.__class__ == LogsQueryError @pytest.mark.live_test_only def test_logs_single_query_with_statistics(): @@ -221,3 +222,45 @@ def test_logs_query_batch_additional_workspaces(): for resp in response: assert len(resp.tables[0].rows) == 2 + +@pytest.mark.live_test_only +def test_logs_query_result_iterate_over_tables(): + client = LogsQueryClient(_credential()) + + query = "AppRequests; AppRequests | take 5" + + response = client.query( + os.environ['LOG_WORKSPACE_ID'], + query, + timespan=None, + include_statistics=True, + include_visualization=True + ) + + ## should iterate over tables + for item in response: + assert item.__class__ == LogsTable + + assert response.statistics is not 
None + assert response.visualization is not None + assert len(response.tables) == 2 + assert response.__class__ == LogsQueryResult + +@pytest.mark.live_test_only +def test_logs_query_result_row_type(): + client = LogsQueryClient(_credential()) + + query = "AppRequests | take 5" + + response = client.query( + os.environ['LOG_WORKSPACE_ID'], + query, + timespan=None, + ) + + ## should iterate over tables + for table in response: + assert table.__class__ == LogsTable + + for row in table.rows: + assert row.__class__ == LogsTableRow diff --git a/sdk/schemaregistry/azure-schemaregistry-avroserializer/CHANGELOG.md b/sdk/schemaregistry/azure-schemaregistry-avroserializer/CHANGELOG.md index ce6d28a08c1d..c0c620b6c1b5 100644 --- a/sdk/schemaregistry/azure-schemaregistry-avroserializer/CHANGELOG.md +++ b/sdk/schemaregistry/azure-schemaregistry-avroserializer/CHANGELOG.md @@ -11,6 +11,8 @@ - `schema_registry` parameter in the `SchemaRegistryAvroSerializer` constructor has been renamed `client`. - `schema_group` parameter in the `SchemaRegistryAvroSerializer` constructor has been renamed `group_name`. - `data` parameter in the `serialize` and `deserialize` methods on `SchemaRegistryAvroSerializer` has been renamed `value`. +- `schema` parameter in the `serialize` method on `SchemaRegistryAvroSerializer` no longer accepts argument of type `bytes`. +- `SchemaRegistryAvroSerializer` constructor no longer takes in the `codec` keyword argument. ### Bugs Fixed diff --git a/sdk/schemaregistry/azure-schemaregistry-avroserializer/azure/schemaregistry/serializer/avroserializer/_schema_registry_avro_serializer.py b/sdk/schemaregistry/azure-schemaregistry-avroserializer/azure/schemaregistry/serializer/avroserializer/_schema_registry_avro_serializer.py index 73eb25a9b31d..e385630a81e2 100644 --- a/sdk/schemaregistry/azure-schemaregistry-avroserializer/azure/schemaregistry/serializer/avroserializer/_schema_registry_avro_serializer.py +++ b/sdk/schemaregistry/azure-schemaregistry-avroserializer/azure/schemaregistry/serializer/avroserializer/_schema_registry_avro_serializer.py @@ -42,7 +42,6 @@ class SchemaRegistryAvroSerializer(object): :param str group_name: Schema group under which schema should be registered. :keyword bool auto_register_schemas: When true, register new schemas passed to serialize. Otherwise, and by default, fail if it has not been pre-registered in the registry. - :keyword str codec: The writer codec. If None, let the avro library decides. """ @@ -130,7 +129,7 @@ def serialize(self, value, schema, **kwargs): :param value: The data to be encoded. :type value: Dict[str, Any] :param schema: The schema used to encode the data. 
- :type schema: Union[str, bytes] + :type schema: str :rtype: bytes """ raw_input_schema = schema diff --git a/sdk/schemaregistry/azure-schemaregistry-avroserializer/samples/avro_serializer.py b/sdk/schemaregistry/azure-schemaregistry-avroserializer/samples/avro_serializer.py index 031ce09c7aa6..c7a40e56104e 100644 --- a/sdk/schemaregistry/azure-schemaregistry-avroserializer/samples/avro_serializer.py +++ b/sdk/schemaregistry/azure-schemaregistry-avroserializer/samples/avro_serializer.py @@ -34,7 +34,7 @@ CLIENT_SECRET=os.environ['SCHEMA_REGISTRY_AZURE_CLIENT_SECRET'] SCHEMA_REGISTRY_ENDPOINT=os.environ['SCHEMA_REGISTRY_ENDPOINT'] -SCHEMA_GROUP=os.environ['SCHEMA_REGISTRY_GROUP'] +GROUP_NAME=os.environ['SCHEMA_REGISTRY_GROUP'] SCHEMA_STRING = """ {"namespace": "example.avro", "type": "record", @@ -80,7 +80,7 @@ def deserialize(serializer, bytes_payload): if __name__ == '__main__': schema_registry = SchemaRegistryClient(endpoint=SCHEMA_REGISTRY_ENDPOINT, credential=token_credential) - serializer = SchemaRegistryAvroSerializer(schema_registry, SCHEMA_GROUP) + serializer = SchemaRegistryAvroSerializer(schema_registry, GROUP_NAME) bytes_data_ben, bytes_data_alice = serialize(serializer) dict_data_ben = deserialize(serializer, bytes_data_ben) dict_data_alice = deserialize(serializer, bytes_data_alice) diff --git a/sdk/schemaregistry/azure-schemaregistry-avroserializer/samples/eventhub_receive_integration.py b/sdk/schemaregistry/azure-schemaregistry-avroserializer/samples/eventhub_receive_integration.py index 6d4fc4a8933b..783fc7167603 100644 --- a/sdk/schemaregistry/azure-schemaregistry-avroserializer/samples/eventhub_receive_integration.py +++ b/sdk/schemaregistry/azure-schemaregistry-avroserializer/samples/eventhub_receive_integration.py @@ -20,7 +20,7 @@ EVENTHUB_NAME = os.environ['EVENT_HUB_NAME'] SCHEMA_REGISTRY_ENDPOINT = os.environ['SCHEMA_REGISTRY_ENDPOINT'] -SCHEMA_GROUP = os.environ['SCHEMA_REGISTRY_GROUP'] +GROUP_NAME = os.environ['SCHEMA_REGISTRY_GROUP'] def on_event(partition_context, event): @@ -50,7 +50,7 @@ def on_event(partition_context, event): endpoint=SCHEMA_REGISTRY_ENDPOINT, credential=DefaultAzureCredential() ), - group_name=SCHEMA_GROUP + group_name=GROUP_NAME ) diff --git a/sdk/schemaregistry/azure-schemaregistry-avroserializer/samples/eventhub_send_integration.py b/sdk/schemaregistry/azure-schemaregistry-avroserializer/samples/eventhub_send_integration.py index 960ba3a47ffa..8169e9ea2734 100644 --- a/sdk/schemaregistry/azure-schemaregistry-avroserializer/samples/eventhub_send_integration.py +++ b/sdk/schemaregistry/azure-schemaregistry-avroserializer/samples/eventhub_send_integration.py @@ -21,7 +21,7 @@ EVENTHUB_NAME = os.environ['EVENT_HUB_NAME'] SCHEMA_REGISTRY_ENDPOINT = os.environ['SCHEMA_REGISTRY_ENDPOINT'] -SCHEMA_GROUP = os.environ['SCHEMA_REGISTRY_GROUP'] +GROUP_NAME = os.environ['SCHEMA_REGISTRY_GROUP'] SCHEMA_STRING = """ {"namespace": "example.avro", @@ -64,7 +64,7 @@ def send_event_data_batch(producer, serializer): endpoint=SCHEMA_REGISTRY_ENDPOINT, credential=DefaultAzureCredential() ), - group_name=SCHEMA_GROUP + group_name=GROUP_NAME ) diff --git a/sdk/schemaregistry/azure-schemaregistry/samples/async_samples/sample_code_schemaregistry_async.py b/sdk/schemaregistry/azure-schemaregistry/samples/async_samples/sample_code_schemaregistry_async.py index 6b64971132e2..6d4b8aaa8540 100644 --- a/sdk/schemaregistry/azure-schemaregistry/samples/async_samples/sample_code_schemaregistry_async.py +++ 
b/sdk/schemaregistry/azure-schemaregistry/samples/async_samples/sample_code_schemaregistry_async.py @@ -47,32 +47,32 @@ def create_client(): async def register_schema(schema_registry_client): # [START register_schema_async] - SCHEMA_GROUP = os.environ['SCHEMA_REGISTRY_GROUP'] - SCHEMA_NAME = 'your-schema-name' + GROUP_NAME = os.environ['SCHEMA_REGISTRY_GROUP'] + NAME = 'your-schema-name' SERIALIZATION_TYPE = SerializationType.AVRO - SCHEMA_CONTENT = """{"namespace":"example.avro","type":"record","name":"User","fields":[{"name":"name","type":"string"},{"name":"favorite_number","type":["int","null"]},{"name":"favorite_color","type":["string","null"]}]}""" - schema_properties = await schema_registry_client.register_schema(SCHEMA_GROUP, SCHEMA_NAME, SCHEMA_CONTENT, SERIALIZATION_TYPE) + CONTENT = """{"namespace":"example.avro","type":"record","name":"User","fields":[{"name":"name","type":"string"},{"name":"favorite_number","type":["int","null"]},{"name":"favorite_color","type":["string","null"]}]}""" + schema_properties = await schema_registry_client.register_schema(GROUP_NAME, NAME, CONTENT, SERIALIZATION_TYPE) schema_id = schema_properties.id # [END register_schema_async] return schema_id -async def get_schema(schema_registry_client, schema_id): +async def get_schema(schema_registry_client, id): # [START get_schema_async] - schema = await schema_registry_client.get_schema(schema_id) + schema = await schema_registry_client.get_schema(id) schema_content = schema.content # [END get_schema_async] return schema_content async def get_schema_id(schema_registry_client): - schema_group = os.environ['SCHEMA_REGISTRY_GROUP'] - schema_name = 'your-schema-name' + group_name = os.environ['SCHEMA_REGISTRY_GROUP'] + name = 'your-schema-name' serialization_type = SerializationType.AVRO - schema_content = """{"namespace":"example.avro","type":"record","name":"User","fields":[{"name":"name","type":"string"},{"name":"favorite_number","type":["int","null"]},{"name":"favorite_color","type":["string","null"]}]}""" + content = """{"namespace":"example.avro","type":"record","name":"User","fields":[{"name":"name","type":"string"},{"name":"favorite_number","type":["int","null"]},{"name":"favorite_color","type":["string","null"]}]}""" # [START get_schema_id_async] - schema_properties = await schema_registry_client.get_schema_properties(schema_group, schema_name, schema_content, serialization_type) + schema_properties = await schema_registry_client.get_schema_properties(group_name, name, content, serialization_type) schema_id = schema_properties.id # [END get_schema_id_async] return schema_id diff --git a/sdk/schemaregistry/azure-schemaregistry/samples/async_samples/schema_registry_async.py b/sdk/schemaregistry/azure-schemaregistry/samples/async_samples/schema_registry_async.py index 766d6075ba67..d115aa4ccbc6 100644 --- a/sdk/schemaregistry/azure-schemaregistry/samples/async_samples/schema_registry_async.py +++ b/sdk/schemaregistry/azure-schemaregistry/samples/async_samples/schema_registry_async.py @@ -23,31 +23,31 @@ CLIENT_SECRET = os.environ['SCHEMA_REGISTRY_AZURE_CLIENT_SECRET'] SCHEMA_REGISTRY_ENDPOINT = os.environ['SCHEMA_REGISTRY_ENDPOINT'] -SCHEMA_GROUP = os.environ['SCHEMA_REGISTRY_GROUP'] -SCHEMA_NAME = 'your-schema-name' +GROUP_NAME = os.environ['SCHEMA_REGISTRY_GROUP'] +NAME = 'your-schema-name' SERIALIZATION_TYPE = SerializationType.AVRO SCHEMA_STRING = 
"""{"namespace":"example.avro","type":"record","name":"User","fields":[{"name":"name","type":"string"},{"name":"favorite_number","type":["int","null"]},{"name":"favorite_color","type":["string","null"]}]}""" -async def register_schema(client, schema_group, schema_name, schema_string, serialization_type): +async def register_schema(client, group_name, name, schema_string, serialization_type): print("Registering schema...") - schema_properties = await client.register_schema(schema_group, schema_name, schema_string, serialization_type) + schema_properties = await client.register_schema(group_name, name, schema_string, serialization_type) print("Schema registered, returned schema id is {}".format(schema_properties.id)) print("Schema properties are {}".format(schema_properties)) return schema_properties.id -async def get_schema_by_id(client, schema_id): +async def get_schema_by_id(client, id): print("Getting schema by id...") - schema = await client.get_schema(schema_id) - print("The schema string of schema id: {} string is {}".format(schema_id, schema.content)) - print("Schema properties are {}".format(schema_id)) + schema = await client.get_schema(id) + print("The schema string of schema id: {} string is {}".format(id, schema.content)) + print("Schema properties are {}".format(id)) return schema.content -async def get_schema_id(client, schema_group, schema_name, schema_string, serialization_type): +async def get_schema_id(client, group_name, name, schema_string, serialization_type): print("Getting schema id...") - schema_properties = await client.get_schema_properties(schema_group, schema_name, schema_string, serialization_type) + schema_properties = await client.get_schema_properties(group_name, name, schema_string, serialization_type) print("The schema id is: {}".format(schema_properties.id)) print("Schema properties are {}".format(schema_properties)) return schema_properties.id @@ -61,9 +61,9 @@ async def main(): ) schema_registry_client = SchemaRegistryClient(endpoint=SCHEMA_REGISTRY_ENDPOINT, credential=token_credential) async with token_credential, schema_registry_client: - schema_id = await register_schema(schema_registry_client, SCHEMA_GROUP, SCHEMA_NAME, SCHEMA_STRING, SERIALIZATION_TYPE) + schema_id = await register_schema(schema_registry_client, GROUP_NAME, NAME, SCHEMA_STRING, SERIALIZATION_TYPE) schema_str = await get_schema_by_id(schema_registry_client, schema_id) - schema_id = await get_schema_id(schema_registry_client, SCHEMA_GROUP, SCHEMA_NAME, SCHEMA_STRING, SERIALIZATION_TYPE) + schema_id = await get_schema_id(schema_registry_client, GROUP_NAME, NAME, SCHEMA_STRING, SERIALIZATION_TYPE) loop = asyncio.get_event_loop() diff --git a/sdk/schemaregistry/azure-schemaregistry/samples/sync_samples/sample_code_schemaregistry.py b/sdk/schemaregistry/azure-schemaregistry/samples/sync_samples/sample_code_schemaregistry.py index 001182601c38..1f3d638fb5bd 100644 --- a/sdk/schemaregistry/azure-schemaregistry/samples/sync_samples/sample_code_schemaregistry.py +++ b/sdk/schemaregistry/azure-schemaregistry/samples/sync_samples/sample_code_schemaregistry.py @@ -45,11 +45,11 @@ def create_client(): def register_schema(schema_registry_client): # [START register_schema_sync] - SCHEMA_GROUP = os.environ['SCHEMA_REGISTRY_GROUP'] - SCHEMA_NAME = 'your-schema-name' + GROUP_NAME = os.environ['SCHEMA_REGISTRY_GROUP'] + NAME = 'your-schema-name' SERIALIZATION_TYPE = SerializationType.AVRO - SCHEMA_CONTENT = 
"""{"namespace":"example.avro","type":"record","name":"User","fields":[{"name":"name","type":"string"},{"name":"favorite_number","type":["int","null"]},{"name":"favorite_color","type":["string","null"]}]}""" - schema_properties = schema_registry_client.register_schema(SCHEMA_GROUP, SCHEMA_NAME, SCHEMA_CONTENT, SERIALIZATION_TYPE) + CONTENT = """{"namespace":"example.avro","type":"record","name":"User","fields":[{"name":"name","type":"string"},{"name":"favorite_number","type":["int","null"]},{"name":"favorite_color","type":["string","null"]}]}""" + schema_properties = schema_registry_client.register_schema(GROUP_NAME, NAME, CONTENT, SERIALIZATION_TYPE) schema_id = schema_properties.id # [END register_schema_sync] @@ -63,9 +63,9 @@ def register_schema(schema_registry_client): return schema_id -def get_schema(schema_registry_client, schema_id): +def get_schema(schema_registry_client, id): # [START get_schema_sync] - schema = schema_registry_client.get_schema(schema_id) + schema = schema_registry_client.get_schema(id) schema_content = schema.content # [END get_schema_sync] @@ -77,12 +77,12 @@ def get_schema(schema_registry_client, schema_id): def get_schema_id(schema_registry_client): - schema_group = os.environ['SCHEMA_REGISTRY_GROUP'] - schema_name = 'your-schema-name' + group_name = os.environ['SCHEMA_REGISTRY_GROUP'] + name = 'your-schema-name' serialization_type = SerializationType.AVRO - schema_content = """{"namespace":"example.avro","type":"record","name":"User","fields":[{"name":"name","type":"string"},{"name":"favorite_number","type":["int","null"]},{"name":"favorite_color","type":["string","null"]}]}""" + content = """{"namespace":"example.avro","type":"record","name":"User","fields":[{"name":"name","type":"string"},{"name":"favorite_number","type":["int","null"]},{"name":"favorite_color","type":["string","null"]}]}""" # [START get_schema_id_sync] - schema_properties = schema_registry_client.get_schema_properties(schema_group, schema_name, schema_content, serialization_type) + schema_properties = schema_registry_client.get_schema_properties(group_name, name, content, serialization_type) schema_id = schema_properties.id # [END get_schema_id_sync] return schema_id diff --git a/sdk/schemaregistry/azure-schemaregistry/samples/sync_samples/schema_registry.py b/sdk/schemaregistry/azure-schemaregistry/samples/sync_samples/schema_registry.py index 78ba7271526f..1d9338b2d055 100644 --- a/sdk/schemaregistry/azure-schemaregistry/samples/sync_samples/schema_registry.py +++ b/sdk/schemaregistry/azure-schemaregistry/samples/sync_samples/schema_registry.py @@ -43,8 +43,8 @@ CLIENT_SECRET = os.environ['SCHEMA_REGISTRY_AZURE_CLIENT_SECRET'] SCHEMA_REGISTRY_ENDPOINT = os.environ['SCHEMA_REGISTRY_ENDPOINT'] -SCHEMA_GROUP = os.environ['SCHEMA_REGISTRY_GROUP'] -SCHEMA_NAME = 'your-schema-name' +GROUP_NAME = os.environ['SCHEMA_REGISTRY_GROUP'] +NAME = 'your-schema-name' SERIALIZATION_TYPE = SerializationType.AVRO SCHEMA_JSON = { @@ -69,25 +69,25 @@ SCHEMA_STRING = json.dumps(SCHEMA_JSON, separators=(',', ':')) -def register_schema(client, schema_group, schema_name, schema_string, serialization_type): +def register_schema(client, group_name, name, schema_string, serialization_type): print("Registering schema...") - schema_properties = client.register_schema(schema_group, schema_name, schema_string, serialization_type) + schema_properties = client.register_schema(group_name, name, schema_string, serialization_type) print("Schema registered, returned schema id is {}".format(schema_properties.id)) 
print("Schema properties are {}".format(schema_properties)) return schema_properties.id -def get_schema_by_id(client, schema_id): +def get_schema_by_id(client, id): print("Getting schema by id...") - schema = client.get_schema(schema_id) - print("The schema string of schema id: {} string is {}".format(schema_id, schema.content)) - print("Schema properties are {}".format(schema_id)) + schema = client.get_schema(id) + print("The schema string of schema id: {} string is {}".format(id, schema.content)) + print("Schema properties are {}".format(id)) return schema.content -def get_schema_id(client, schema_group, schema_name, schema_string, serialization_type): +def get_schema_id(client, group_name, name, schema_string, serialization_type): print("Getting schema id...") - schema_properties = client.get_schema_properties(schema_group, schema_name, schema_string, serialization_type) + schema_properties = client.get_schema_properties(group_name, name, schema_string, serialization_type) print("The schema id is: {}".format(schema_properties.id)) print("Schema properties are {}".format(schema_properties)) return schema_properties.id @@ -101,7 +101,6 @@ def get_schema_id(client, schema_group, schema_name, schema_string, serializatio ) schema_registry_client = SchemaRegistryClient(endpoint=SCHEMA_REGISTRY_ENDPOINT, credential=token_credential) with schema_registry_client: - schema_id = register_schema(schema_registry_client, SCHEMA_GROUP, SCHEMA_NAME, SCHEMA_STRING, SERIALIZATION_TYPE) - schema_str = get_schema_by_id(schema_registry_client, schema_id=schema_id) - schema_id = get_schema_id(schema_registry_client, SCHEMA_GROUP, SCHEMA_NAME, SCHEMA_STRING, SERIALIZATION_TYPE) - + schema_id = register_schema(schema_registry_client, GROUP_NAME, NAME, SCHEMA_STRING, SERIALIZATION_TYPE) + schema_str = get_schema_by_id(schema_registry_client, schema_id) + schema_id = get_schema_id(schema_registry_client, GROUP_NAME, NAME, SCHEMA_STRING, SERIALIZATION_TYPE) diff --git a/sdk/search/azure-search-documents/CHANGELOG.md b/sdk/search/azure-search-documents/CHANGELOG.md index dce720459dba..c5911ab8e9a2 100644 --- a/sdk/search/azure-search-documents/CHANGELOG.md +++ b/sdk/search/azure-search-documents/CHANGELOG.md @@ -4,8 +4,15 @@ ### Features Added +- Added properties to `SearchClient`: `query_answer`, `query_answer_count`, + `query_caption`, `query_caption_highlight` and `semantic_fields`. + ### Breaking Changes +- Renamed `SearchClient.speller` to `SearchClient.query_speller`. +- Removed keyword arguments from `SearchClient`: `answers` and `captions`. +- `SentimentSkill`, `EntityRecognitionSkill`: added client-side validation to prevent sending unsupported parameters. + ### Bugs Fixed ### Other Changes diff --git a/sdk/search/azure-search-documents/azure/search/documents/_search_client.py b/sdk/search/azure-search-documents/azure/search/documents/_search_client.py index 27853cd783cb..68a1e180bb42 100644 --- a/sdk/search/azure-search-documents/azure/search/documents/_search_client.py +++ b/sdk/search/azure-search-documents/azure/search/documents/_search_client.py @@ -152,7 +152,7 @@ def get_document(self, key, selected_fields=None, **kwargs): return cast(dict, result) @distributed_trace - def search(self, search_text, **kwargs): + def search(self, search_text, **kwargs): # pylint:disable=too-many-locals # type: (str, **Any) -> SearchItemPaged[dict] """Search the Azure search index for documents. 
@@ -200,12 +200,23 @@ def search(self, search_text, **kwargs): :keyword query_language: A value that specifies the language of the search query. Possible values include: "none", "en-us". :paramtype query_language: str or ~azure.search.documents.models.QueryLanguage - :keyword speller: A value that specified the type of the speller to use to spell-correct + :keyword query_speller: A value that specifies the type of the speller to use to spell-correct individual search query terms. Possible values include: "none", "lexicon". - :paramtype speller: str or ~azure.search.documents.models.Speller - :keyword answers: A value that specifies whether answers should be returned as part of the search - response. Possible values include: "none", "extractive". - :paramtype answers: str or ~azure.search.documents.models.Answers + :paramtype query_speller: str or ~azure.search.documents.models.Speller + :keyword query_answer: This parameter is only valid if the query type is 'semantic'. If set, + the query returns answers extracted from key passages in the highest ranked documents. + Possible values include: "none", "extractive". + :paramtype query_answer: str or ~azure.search.documents.models.Answers + :keyword int query_answer_count: This parameter is only valid if the query type is 'semantic' and + query answer is 'extractive'. Configures the number of answers returned. Default count is 1. + :keyword query_caption: This parameter is only valid if the query type is 'semantic'. If set, the + query returns captions extracted from key passages in the highest ranked documents. + Defaults to 'None'. Possible values include: "none", "extractive". + :paramtype query_caption: str or ~azure.search.documents.models.Captions + :keyword bool query_caption_highlight: This parameter is only valid if the query type is 'semantic' and + query caption is set to 'extractive'. Determines whether highlighting is enabled. + Defaults to 'true'. + :keyword list[str] semantic_fields: The list of field names used for semantic search. :keyword list[str] select: The list of fields to retrieve. If unspecified, all fields marked as retrievable in the schema are included. :keyword int skip: The number of search results to skip. This value cannot be greater than 100,000.
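The hunk that follows folds the new keyword arguments into the single `answers` and `captions` strings that the underlying `SearchQuery` expects. A small standalone sketch of that composition, with illustrative values and field names:

```py
# Mirrors the string composition performed in the implementation hunk below.
query_answer = "extractive"
query_answer_count = 3
answers = query_answer if not query_answer_count else '{}|count-{}'.format(
    query_answer, query_answer_count
)
print(answers)  # extractive|count-3

query_caption = "extractive"
query_caption_highlight = True
captions = query_caption if not query_caption_highlight else '{}|highlight-{}'.format(
    query_caption, query_caption_highlight
)
print(captions)  # extractive|highlight-True

semantic_fields = ["category", "description"]
print(",".join(semantic_fields))  # category,description
```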
@@ -259,11 +270,25 @@ def search(self, search_text, **kwargs): search_fields_str = ",".join(search_fields) if search_fields else None search_mode = kwargs.pop("search_mode", None) query_language = kwargs.pop("query_language", None) - speller = kwargs.pop("speller", None) - answers = kwargs.pop("answers", None) + query_speller = kwargs.pop("query_speller", None) select = kwargs.pop("select", None) skip = kwargs.pop("skip", None) top = kwargs.pop("top", None) + + query_answer = kwargs.pop("query_answer", None) + query_answer_count = kwargs.pop("query_answer_count", None) + answers = query_answer if not query_answer_count else '{}|count-{}'.format( + query_answer, query_answer_count + ) + + query_caption = kwargs.pop("query_caption", None) + query_caption_highlight = kwargs.pop("query_caption_highlight", None) + captions = query_caption if not query_caption_highlight else '{}|highlight-{}'.format( + query_caption, query_caption_highlight + ) + + semantic_fields = kwargs.pop("semantic_fields", None) + query = SearchQuery( search_text=search_text, include_total_result_count=include_total_result_count, @@ -280,8 +305,10 @@ def search(self, search_text, **kwargs): search_fields=search_fields_str, search_mode=search_mode, query_language=query_language, - speller=speller, + speller=query_speller, answers=answers, + captions=captions, + semantic_fields=",".join(semantic_fields) if semantic_fields else None, select=select if isinstance(select, six.string_types) else None, skip=skip, top=top, diff --git a/sdk/search/azure-search-documents/azure/search/documents/aio/_search_client_async.py b/sdk/search/azure-search-documents/azure/search/documents/aio/_search_client_async.py index 650b62dd1666..e7f66ca59ae0 100644 --- a/sdk/search/azure-search-documents/azure/search/documents/aio/_search_client_async.py +++ b/sdk/search/azure-search-documents/azure/search/documents/aio/_search_client_async.py @@ -128,8 +128,9 @@ async def get_document(self, key, selected_fields=None, **kwargs): ) return cast(dict, result) + @distributed_trace_async - async def search(self, search_text, **kwargs): + async def search(self, search_text, **kwargs): # pylint:disable=too-many-locals # type: (str, **Any) -> AsyncSearchItemPaged[dict] """Search the Azure search index for documents. @@ -177,12 +178,24 @@ async def search(self, search_text, **kwargs): :keyword query_language: A value that specifies the language of the search query. Possible values include: "none", "en-us". :paramtype query_language: str or ~azure.search.documents.models.QueryLanguage - :keyword speller: A value that specified the type of the speller to use to spell-correct + :keyword query_speller: A value that specified the type of the speller to use to spell-correct individual search query terms. Possible values include: "none", "lexicon". - :paramtype speller: str or ~azure.search.documents.models.Speller - :keyword answers: A value that specifies whether answers should be returned as part of the search - response. Possible values include: "none", "extractive". - :paramtype answers: str or ~azure.search.documents.models.Answers + :paramtype query_speller: str or ~azure.search.documents.models.Speller + :keyword query_answer: This parameter is only valid if the query type is 'semantic'. If set, + the query returns answers extracted from key passages in the highest ranked documents. + Possible values include: "none", "extractive". 
+ :paramtype query_answer: str or ~azure.search.documents.models.Answers + :keyword int query_answer_count: This parameter is only valid if the query type is 'semantic' and + query answer is 'extractive'. + Configures the number of answers returned. Default count is 1. + :keyword query_caption: This parameter is only valid if the query type is 'semantic'. If set, the + query returns captions extracted from key passages in the highest ranked documents. + Defaults to 'None'. Possible values include: "none", "extractive". + :paramtype query_caption: str or ~azure.search.documents.models.Captions + :keyword bool query_caption_highlight: This parameter is only valid if the query type is 'semantic' when + query caption is set to 'extractive'. Determines whether highlighting is enabled. + Defaults to 'true'. + :keyword list[str] semantic_fields: The list of field names used for semantic search. :keyword list[str] select: The list of fields to retrieve. If unspecified, all fields marked as retrievable in the schema are included. :keyword int skip: The number of search results to skip. This value cannot be greater than 100,000. @@ -236,11 +249,25 @@ async def search(self, search_text, **kwargs): search_fields_str = ",".join(search_fields) if search_fields else None search_mode = kwargs.pop("search_mode", None) query_language = kwargs.pop("query_language", None) - speller = kwargs.pop("speller", None) - answers = kwargs.pop("answers", None) + query_speller = kwargs.pop("query_speller", None) select = kwargs.pop("select", None) skip = kwargs.pop("skip", None) top = kwargs.pop("top", None) + + query_answer = kwargs.pop("query_answer", None) + query_answer_count = kwargs.pop("query_answer_count", None) + answers = query_answer if not query_answer_count else '{}|count-{}'.format( + query_answer, query_answer_count + ) + + query_caption = kwargs.pop("query_caption", None) + query_caption_highlight = kwargs.pop("query_caption_highlight", None) + captions = query_caption if not query_caption_highlight else '{}|highlight-{}'.format( + query_caption, query_caption_highlight + ) + + semantic_fields = kwargs.pop("semantic_fields", None) + query = SearchQuery( search_text=search_text, include_total_result_count=include_total_result_count, @@ -257,8 +284,10 @@ async def search(self, search_text, **kwargs): search_fields=search_fields_str, search_mode=search_mode, query_language=query_language, - speller=speller, + speller=query_speller, answers=answers, + captions=captions, + semantic_fields=",".join(semantic_fields) if semantic_fields else None, select=select if isinstance(select, six.string_types) else None, skip=skip, top=top, diff --git a/sdk/search/azure-search-documents/azure/search/documents/indexes/_search_indexer_client.py b/sdk/search/azure-search-documents/azure/search/documents/indexes/_search_indexer_client.py index 061e7d54462f..27a083e917f6 100644 --- a/sdk/search/azure-search-documents/azure/search/documents/indexes/_search_indexer_client.py +++ b/sdk/search/azure-search-documents/azure/search/documents/indexes/_search_indexer_client.py @@ -15,7 +15,11 @@ get_access_conditions, normalize_endpoint, ) -from .models import SearchIndexerDataSourceConnection +from .models import ( + EntityRecognitionSkillVersion, + SearchIndexerDataSourceConnection, + SentimentSkillVersion +) from .._api_versions import DEFAULT_VERSION from .._headers_mixin import HeadersMixin from .._utils import get_authentication_policy @@ -564,7 +568,9 @@ def create_skillset(self, skillset, **kwargs): """ kwargs["headers"] = 
self._merge_client_headers(kwargs.get("headers")) + _validate_skillset(skillset) skillset = skillset._to_generated() if hasattr(skillset, '_to_generated') else skillset # pylint:disable=protected-access + result = self._client.skillsets.create(skillset, **kwargs) return SearchIndexerSkillset._from_generated(result) # pylint:disable=protected-access @@ -587,6 +593,7 @@ def create_or_update_skillset(self, skillset, **kwargs): skillset, kwargs.pop("match_condition", MatchConditions.Unconditionally) ) kwargs.update(access_condition) + _validate_skillset(skillset) skillset = skillset._to_generated() if hasattr(skillset, '_to_generated') else skillset # pylint:disable=protected-access result = self._client.skillsets.create_or_update( @@ -596,3 +603,44 @@ def create_or_update_skillset(self, skillset, **kwargs): **kwargs ) return SearchIndexerSkillset._from_generated(result) # pylint:disable=protected-access + +def _validate_skillset(skillset): + """Validates any multi-version skills in the skillset to verify that unsupported + parameters are not supplied by the user. + """ + skills = getattr(skillset, 'skills', None) + if not skills: + return + + error_strings = [] + for skill in skills: + try: + skill_version = skill.get('skill_version') + except AttributeError: + skill_version = getattr(skill, 'skill_version', None) + if not skill_version: + continue + + if skill_version == SentimentSkillVersion.V1: + unsupported = ['model_version', 'include_opinion_mining'] + elif skill_version == SentimentSkillVersion.V3: + unsupported = [] + elif skill_version == EntityRecognitionSkillVersion.V1: + unsupported = ['model_version'] + elif skill_version == EntityRecognitionSkillVersion.V3: + unsupported = ['include_typeless_entities'] + + errors = [] + for item in unsupported: + try: + if skill.get(item, None): + errors.append(item) + except AttributeError: + if skill.__dict__.get(item, None): + errors.append(item) + if errors: + error_strings.append("Unsupported parameters for skill version {}: {}".format( + skill_version, ", ".join(errors)) + ) + if error_strings: + raise ValueError("\n".join(error_strings)) diff --git a/sdk/search/azure-search-documents/azure/search/documents/indexes/models/_index.py b/sdk/search/azure-search-documents/azure/search/documents/indexes/models/_index.py index c4bc1bbe1318..778ea41176a5 100644 --- a/sdk/search/azure-search-documents/azure/search/documents/indexes/models/_index.py +++ b/sdk/search/azure-search-documents/azure/search/documents/indexes/models/_index.py @@ -31,26 +31,26 @@ class SearchField(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param name: Required. The name of the field, which must be unique within the fields collection + :keyword name: Required. The name of the field, which must be unique within the fields collection of the index or parent field. - :type name: str - :param type: Required. The data type of the field. Possible values include: "Edm.String", + :paramtype name: str + :keyword type: Required. The data type of the field. Possible values include: "Edm.String", "Edm.Int32", "Edm.Int64", "Edm.Double", "Edm.Boolean", "Edm.DateTimeOffset", "Edm.GeographyPoint", "Edm.ComplexType". - :type type: str or ~azure.search.documents.indexes.models.SearchFieldDataType - :param key: A value indicating whether the field uniquely identifies documents in the index. 
+ :paramtype type: str or ~azure.search.documents.indexes.models.SearchFieldDataType + :keyword key: A value indicating whether the field uniquely identifies documents in the index. Exactly one top-level field in each index must be chosen as the key field and it must be of type Edm.String. Key fields can be used to look up documents directly and update or delete specific documents. Default is false for simple fields and null for complex fields. - :type key: bool - :param hidden: A value indicating whether the field can be returned in a search result. + :paramtype key: bool + :keyword hidden: A value indicating whether the field can be returned in a search result. You can enable this option if you want to use a field (for example, margin) as a filter, sorting, or scoring mechanism but do not want the field to be visible to the end user. This property must be False for key fields, and it must be null for complex fields. This property can be changed on existing fields. Enabling this property does not cause any increase in index storage requirements. Default is False for simple fields and null for complex fields. - :type hidden: bool - :param searchable: A value indicating whether the field is full-text searchable. This means it + :paramtype hidden: bool + :keyword searchable: A value indicating whether the field is full-text searchable. This means it will undergo analysis such as word-breaking during indexing. If you set a searchable field to a value like "sunny day", internally it will be split into the individual tokens "sunny" and "day". This enables full-text searches for these terms. Fields of type Edm.String or @@ -59,15 +59,15 @@ class SearchField(msrest.serialization.Model): consume extra space in your index since Azure Cognitive Search will store an additional tokenized version of the field value for full-text searches. If you want to save space in your index and you don't need a field to be included in searches, set searchable to false. - :type searchable: bool - :param filterable: A value indicating whether to enable the field to be referenced in $filter + :paramtype searchable: bool + :keyword filterable: A value indicating whether to enable the field to be referenced in $filter queries. filterable differs from searchable in how strings are handled. Fields of type Edm.String or Collection(Edm.String) that are filterable do not undergo word-breaking, so comparisons are for exact matches only. For example, if you set such a field f to "sunny day", $filter=f eq 'sunny' will find no matches, but $filter=f eq 'sunny day' will. This property must be null for complex fields. Default is true for simple fields and null for complex fields. - :type filterable: bool - :param sortable: A value indicating whether to enable the field to be referenced in $orderby + :paramtype filterable: bool + :keyword sortable: A value indicating whether to enable the field to be referenced in $orderby expressions. By default Azure Cognitive Search sorts results by score, but in many experiences users will want to sort by fields in the documents. A simple field can be sortable only if it is single-valued (it has a single value in the scope of the parent document). Simple collection @@ -77,15 +77,15 @@ class SearchField(msrest.serialization.Model): cannot be sortable and the sortable property must be null for such fields. The default for sortable is true for single-valued simple fields, false for multi-valued simple fields, and null for complex fields. 
- :type sortable: bool - :param facetable: A value indicating whether to enable the field to be referenced in facet + :paramtype sortable: bool + :keyword facetable: A value indicating whether to enable the field to be referenced in facet queries. Typically used in a presentation of search results that includes hit count by category (for example, search for digital cameras and see hits by brand, by megapixels, by price, and so on). This property must be null for complex fields. Fields of type Edm.GeographyPoint or Collection(Edm.GeographyPoint) cannot be facetable. Default is true for all other simple fields. - :type facetable: bool - :param analyzer_name: The name of the analyzer to use for the field. This option can be used only + :paramtype facetable: bool + :keyword analyzer_name: The name of the analyzer to use for the field. This option can be used only with searchable fields and it can't be set together with either searchAnalyzer or indexAnalyzer. Once the analyzer is chosen, it cannot be changed for the field. Must be null for complex fields. Possible values include: "ar.microsoft", "ar.lucene", "hy.lucene", @@ -105,8 +105,8 @@ class SearchField(msrest.serialization.Model): "th.microsoft", "th.lucene", "tr.microsoft", "tr.lucene", "uk.microsoft", "ur.microsoft", "vi.microsoft", "standard.lucene", "standardasciifolding.lucene", "keyword", "pattern", "simple", "stop", "whitespace". - :type analyzer_name: str or ~azure.search.documents.indexes.models.LexicalAnalyzerName - :param search_analyzer_name: The name of the analyzer used at search time for the field. This option + :paramtype analyzer_name: str or ~azure.search.documents.indexes.models.LexicalAnalyzerName + :keyword search_analyzer_name: The name of the analyzer used at search time for the field. This option can be used only with searchable fields. It must be set together with indexAnalyzer and it cannot be set together with the analyzer option. This property cannot be set to the name of a language analyzer; use the analyzer property instead if you need a language analyzer. This @@ -128,8 +128,8 @@ class SearchField(msrest.serialization.Model): "th.lucene", "tr.microsoft", "tr.lucene", "uk.microsoft", "ur.microsoft", "vi.microsoft", "standard.lucene", "standardasciifolding.lucene", "keyword", "pattern", "simple", "stop", "whitespace". - :type search_analyzer_name: str or ~azure.search.documents.indexes.models.LexicalAnalyzerName - :param index_analyzer_name: The name of the analyzer used at indexing time for the field. This + :paramtype search_analyzer_name: str or ~azure.search.documents.indexes.models.LexicalAnalyzerName + :keyword index_analyzer_name: The name of the analyzer used at indexing time for the field. This option can be used only with searchable fields. It must be set together with searchAnalyzer and it cannot be set together with the analyzer option. This property cannot be set to the name of a language analyzer; use the analyzer property instead if you need a language analyzer. Once @@ -151,21 +151,21 @@ class SearchField(msrest.serialization.Model): "th.lucene", "tr.microsoft", "tr.lucene", "uk.microsoft", "ur.microsoft", "vi.microsoft", "standard.lucene", "standardasciifolding.lucene", "keyword", "pattern", "simple", "stop", "whitespace". - :type index_analyzer_name: str or ~azure.search.documents.indexes.models.LexicalAnalyzerName - :param normalizer_name: The name of the normalizer to use for the field. 
This option can be used + :paramtype index_analyzer_name: str or ~azure.search.documents.indexes.models.LexicalAnalyzerName + :keyword normalizer_name: The name of the normalizer to use for the field. This option can be used only with fields with filterable, sortable, or facetable enabled. Once the normalizer is chosen, it cannot be changed for the field. Must be null for complex fields. Possible values include: "asciifolding", "elision", "lowercase", "standard", "uppercase". - :type normalizer_name: str or ~azure.search.documents.indexes.models.LexicalNormalizerName - :param synonym_map_names: A list of the names of synonym maps to associate with this field. This + :paramtype normalizer_name: str or ~azure.search.documents.indexes.models.LexicalNormalizerName + :keyword synonym_map_names: A list of the names of synonym maps to associate with this field. This option can be used only with searchable fields. Currently only one synonym map per field is supported. Assigning a synonym map to a field ensures that query terms targeting that field are expanded at query-time using the rules in the synonym map. This attribute can be changed on existing fields. Must be null or an empty collection for complex fields. - :type synonym_map_names: list[str] - :param fields: A list of sub-fields if this is a field of type Edm.ComplexType or + :paramtype synonym_map_names: list[str] + :keyword fields: A list of sub-fields if this is a field of type Edm.ComplexType or Collection(Edm.ComplexType). Must be null or empty for simple fields. - :type fields: list[~azure.search.documents.models.SearchField] + :paramtype fields: list[~azure.search.documents.models.SearchField] """ _validation = { @@ -268,34 +268,34 @@ def SimpleField(**kw): # type: (**Any) -> SearchField """Configure a simple field for an Azure Search Index - :param name: Required. The name of the field, which must be unique within the fields collection + :keyword name: Required. The name of the field, which must be unique within the fields collection of the index or parent field. - :type name: str - :param type: Required. The data type of the field. Possible values include: SearchFieldDataType.String, + :paramtype name: str + :keyword type: Required. The data type of the field. Possible values include: SearchFieldDataType.String, SearchFieldDataType.Int32, SearchFieldDataType.Int64, SearchFieldDataType.Double, SearchFieldDataType.Boolean, SearchFieldDataType.DateTimeOffset, SearchFieldDataType.GeographyPoint, SearchFieldDataType.ComplexType, from `azure.search.documents.SearchFieldDataType`. - :type type: str - :param key: A value indicating whether the field uniquely identifies documents in the index. + :paramtype type: str + :keyword key: A value indicating whether the field uniquely identifies documents in the index. Exactly one top-level field in each index must be chosen as the key field and it must be of type SearchFieldDataType.String. Key fields can be used to look up documents directly and update or delete specific documents. Default is False - :type key: bool - :param hidden: A value indicating whether the field can be returned in a search result. + :paramtype key: bool + :keyword hidden: A value indicating whether the field can be returned in a search result. You can enable this option if you want to use a field (for example, margin) as a filter, sorting, or scoring mechanism but do not want the field to be visible to the end user. This property must be False for key fields. This property can be changed on existing fields. 
Enabling this property does not cause any increase in index storage requirements. Default is False. - :type hidden: bool - :param filterable: A value indicating whether to enable the field to be referenced in $filter + :paramtype hidden: bool + :keyword filterable: A value indicating whether to enable the field to be referenced in $filter queries. filterable differs from searchable in how strings are handled. Fields of type SearchFieldDataType.String or Collection(SearchFieldDataType.String) that are filterable do not undergo word-breaking, so comparisons are for exact matches only. For example, if you set such a field f to "sunny day", $filter=f eq 'sunny' will find no matches, but $filter=f eq 'sunny day' will. This property must be null for complex fields. Default is False - :type filterable: bool - :param sortable: A value indicating whether to enable the field to be referenced in $orderby + :paramtype filterable: bool + :keyword sortable: A value indicating whether to enable the field to be referenced in $orderby expressions. By default Azure Cognitive Search sorts results by score, but in many experiences users will want to sort by fields in the documents. A simple field can be sortable only if it is single-valued (it has a single value in the scope of the parent document). Simple collection @@ -303,13 +303,13 @@ def SimpleField(**kw): collections are also multi-valued, and therefore cannot be sortable. This is true whether it's an immediate parent field, or an ancestor field, that's the complex collection. The default is False. - :type sortable: bool - :param facetable: A value indicating whether to enable the field to be referenced in facet + :paramtype sortable: bool + :keyword facetable: A value indicating whether to enable the field to be referenced in facet queries. Typically used in a presentation of search results that includes hit count by category (for example, search for digital cameras and see hits by brand, by megapixels, by price, and so on). Fields of type SearchFieldDataType.GeographyPoint or Collection(SearchFieldDataType.GeographyPoint) cannot be facetable. Default is False. - :type facetable: bool + :paramtype facetable: bool """ result = {"name": kw.get("name"), "type": kw.get("type")} # type: Dict[str, Any] result["key"] = kw.get("key", False) @@ -325,24 +325,24 @@ def SearchableField(**kw): # type: (**Any) -> SearchField """Configure a searchable text field for an Azure Search Index - :param name: Required. The name of the field, which must be unique within the fields collection + :keyword name: Required. The name of the field, which must be unique within the fields collection of the index or parent field. - :type name: str - :param collection: Whether this search field is a collection (default False) - :type collection: bool - :param key: A value indicating whether the field uniquely identifies documents in the index. + :paramtype name: str + :keyword collection: Whether this search field is a collection (default False) + :paramtype collection: bool + :keyword key: A value indicating whether the field uniquely identifies documents in the index. Exactly one top-level field in each index must be chosen as the key field and it must be of type SearchFieldDataType.String. Key fields can be used to look up documents directly and update or delete specific documents. Default is False - :type key: bool - :param hidden: A value indicating whether the field can be returned in a search result. 
+ :paramtype key: bool + :keyword hidden: A value indicating whether the field can be returned in a search result. You can enable this option if you want to use a field (for example, margin) as a filter, sorting, or scoring mechanism but do not want the field to be visible to the end user. This property must be False for key fields. This property can be changed on existing fields. Enabling this property does not cause any increase in index storage requirements. Default is False. - :type hidden: bool - :param searchable: A value indicating whether the field is full-text searchable. This means it + :paramtype hidden: bool + :keyword searchable: A value indicating whether the field is full-text searchable. This means it will undergo analysis such as word-breaking during indexing. If you set a searchable field to a value like "sunny day", internally it will be split into the individual tokens "sunny" and "day". This enables full-text searches for these terms. Note: searchable fields @@ -350,23 +350,23 @@ def SearchableField(**kw): tokenized version of the field value for full-text searches. If you want to save space in your index and you don't need a field to be included in searches, set searchable to false. Default is True. - :type searchable: bool - :param filterable: A value indicating whether to enable the field to be referenced in $filter + :paramtype searchable: bool + :keyword filterable: A value indicating whether to enable the field to be referenced in $filter queries. filterable differs from searchable in how strings are handled. Fields that are filterable do not undergo word-breaking, so comparisons are for exact matches only. For example, if you set such a field f to "sunny day", $filter=f eq 'sunny' will find no matches, but $filter=f eq 'sunny day' will. Default is False. - :type filterable: bool - :param sortable: A value indicating whether to enable the field to be referenced in $orderby + :paramtype filterable: bool + :keyword sortable: A value indicating whether to enable the field to be referenced in $orderby expressions. By default Azure Cognitive Search sorts results by score, but in many experiences users will want to sort by fields in the documents. The default is true False. - :type sortable: bool - :param facetable: A value indicating whether to enable the field to be referenced in facet + :paramtype sortable: bool + :keyword facetable: A value indicating whether to enable the field to be referenced in facet queries. Typically used in a presentation of search results that includes hit count by category (for example, search for digital cameras and see hits by brand, by megapixels, by price, and so on). Default is False. - :type facetable: bool - :param analyzer_name: The name of the analyzer to use for the field. This option can't be set together + :paramtype facetable: bool + :keyword analyzer_name: The name of the analyzer to use for the field. This option can't be set together with either searchAnalyzer or indexAnalyzer. Once the analyzer is chosen, it cannot be changed for the field. Possible values include: 'ar.microsoft', 'ar.lucene', 'hy.lucene', 'bn.microsoft', 'eu.lucene', 'bg.microsoft', 'bg.lucene', 'ca.microsoft', 'ca.lucene', 'zh- @@ -385,8 +385,8 @@ def SearchableField(**kw): 'th.microsoft', 'th.lucene', 'tr.microsoft', 'tr.lucene', 'uk.microsoft', 'ur.microsoft', 'vi.microsoft', 'standard.lucene', 'standardasciifolding.lucene', 'keyword', 'pattern', 'simple', 'stop', 'whitespace'. 
- :type analyzer_name: str or ~azure.search.documents.indexes.models.AnalyzerName - :param search_analyzer_name: The name of the analyzer used at search time for the field. It must be + :paramtype analyzer_name: str or ~azure.search.documents.indexes.models.AnalyzerName + :keyword search_analyzer_name: The name of the analyzer used at search time for the field. It must be set together with indexAnalyzer and it cannot be set together with the analyzer option. This property cannot be set to the name of a language analyzer; use the analyzer property instead if you need a language analyzer. This analyzer can be updated on an existing field. Possible @@ -408,8 +408,8 @@ def SearchableField(**kw): 'th.lucene', 'tr.microsoft', 'tr.lucene', 'uk.microsoft', 'ur.microsoft', 'vi.microsoft', 'standard.lucene', 'standardasciifolding.lucene', 'keyword', 'pattern', 'simple', 'stop', 'whitespace'. - :type search_analyzer_name: str or ~azure.search.documents.indexes.models.AnalyzerName - :param index_analyzer_name: The name of the analyzer used at indexing time for the field. + :paramtype search_analyzer_name: str or ~azure.search.documents.indexes.models.AnalyzerName + :keyword index_analyzer_name: The name of the analyzer used at indexing time for the field. It must be set together with searchAnalyzer and it cannot be set together with the analyzer option. This property cannot be set to the name of a language analyzer; use the analyzer property instead if you need a language analyzer. Once the analyzer is chosen, it cannot be @@ -431,12 +431,12 @@ def SearchableField(**kw): 'th.lucene', 'tr.microsoft', 'tr.lucene', 'uk.microsoft', 'ur.microsoft', 'vi.microsoft', 'standard.lucene', 'standardasciifolding.lucene', 'keyword', 'pattern', 'simple', 'stop', 'whitespace'. - :type index_analyzer_name: str or ~azure.search.documents.indexes.models.AnalyzerName - :param synonym_map_names: A list of the names of synonym maps to associate with this field. Currently + :paramtype index_analyzer_name: str or ~azure.search.documents.indexes.models.AnalyzerName + :keyword synonym_map_names: A list of the names of synonym maps to associate with this field. Currently only one synonym map per field is supported. Assigning a synonym map to a field ensures that query terms targeting that field are expanded at query-time using the rules in the synonym map. This attribute can be changed on existing fields. - :type synonym_map_names: list[str] + :paramtype synonym_map_names: list[str] """ typ = Collection(String) if kw.get("collection", False) else String @@ -463,14 +463,14 @@ def ComplexField(**kw): """Configure a Complex or Complex collection field for an Azure Search Index - :param name: Required. The name of the field, which must be unique within the fields collection + :keyword name: Required. The name of the field, which must be unique within the fields collection of the index or parent field. 
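`SearchableField` takes the same keywords plus the analyzer-related ones listed above. A sketch, assuming the built-in `en.lucene` analyzer and a hypothetical synonym map name:

```py
from azure.search.documents.indexes.models import SearchableField

description = SearchableField(
    name="description",
    collection=False,
    analyzer_name="en.lucene",             # applied at both indexing and query time
    synonym_map_names=["my-synonym-map"],  # hypothetical; only one map per field is supported
)
```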
- :type name: str - :param collection: Whether this complex field is a collection (default False) - :type collection: bool - :type type: str or ~search_service_client.models.DataType - :param fields: A list of sub-fields - :type fields: list[~search_service_client.models.Field] + :paramtype name: str + :keyword collection: Whether this complex field is a collection (default False) + :paramtype collection: bool + :paramtype type: str or ~search_service_client.models.DataType + :keyword fields: A list of sub-fields + :paramtype fields: list[~search_service_client.models.Field] """ typ = Collection(ComplexType) if kw.get("collection", False) else ComplexType @@ -485,32 +485,32 @@ class SearchIndex(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param name: Required. The name of the index. - :type name: str - :param fields: Required. The fields of the index. - :type fields: list[~azure.search.documents.indexes.models.SearchField] - :param scoring_profiles: The scoring profiles for the index. - :type scoring_profiles: list[~azure.search.documents.indexes.models.ScoringProfile] - :param default_scoring_profile: The name of the scoring profile to use if none is specified in + :keyword name: Required. The name of the index. + :paramtype name: str + :keyword fields: Required. The fields of the index. + :paramtype fields: list[~azure.search.documents.indexes.models.SearchField] + :keyword scoring_profiles: The scoring profiles for the index. + :paramtype scoring_profiles: list[~azure.search.documents.indexes.models.ScoringProfile] + :keyword default_scoring_profile: The name of the scoring profile to use if none is specified in the query. If this property is not set and no scoring profile is specified in the query, then default scoring (tf-idf) will be used. - :type default_scoring_profile: str - :param cors_options: Options to control Cross-Origin Resource Sharing (CORS) for the index. - :type cors_options: ~azure.search.documents.indexes.models.CorsOptions - :param suggesters: The suggesters for the index. - :type suggesters: list[~azure.search.documents.indexes.models.SearchSuggester] - :param analyzers: The analyzers for the index. - :type analyzers: list[~azure.search.documents.indexes.models.LexicalAnalyzer] - :param tokenizers: The tokenizers for the index. - :type tokenizers: list[~azure.search.documents.indexes.models.LexicalTokenizer] - :param token_filters: The token filters for the index. - :type token_filters: list[~azure.search.documents.indexes.models.TokenFilter] - :param char_filters: The character filters for the index. - :type char_filters: list[~azure.search.documents.indexes.models.CharFilter] - :param normalizers: The normalizers for the index. - :type normalizers: + :paramtype default_scoring_profile: str + :keyword cors_options: Options to control Cross-Origin Resource Sharing (CORS) for the index. + :paramtype cors_options: ~azure.search.documents.indexes.models.CorsOptions + :keyword suggesters: The suggesters for the index. + :paramtype suggesters: list[~azure.search.documents.indexes.models.SearchSuggester] + :keyword analyzers: The analyzers for the index. + :paramtype analyzers: list[~azure.search.documents.indexes.models.LexicalAnalyzer] + :keyword tokenizers: The tokenizers for the index. + :paramtype tokenizers: list[~azure.search.documents.indexes.models.LexicalTokenizer] + :keyword token_filters: The token filters for the index. 
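`ComplexField` nests other field definitions through the `fields` keyword. An illustrative sketch:

```py
from azure.search.documents.indexes.models import (
    ComplexField,
    SearchFieldDataType,
    SimpleField,
)

address = ComplexField(
    name="address",
    fields=[
        SimpleField(name="city", type=SearchFieldDataType.String, filterable=True),
        SimpleField(name="postal_code", type=SearchFieldDataType.String),
    ],
)
```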
+ :paramtype token_filters: list[~azure.search.documents.indexes.models.TokenFilter] + :keyword char_filters: The character filters for the index. + :paramtype char_filters: list[~azure.search.documents.indexes.models.CharFilter] + :keyword normalizers: The normalizers for the index. + :paramtype normalizers: list[~azure.search.documents.indexes.models.LexicalNormalizer] - :param encryption_key: A description of an encryption key that you create in Azure Key Vault. + :keyword encryption_key: A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your data when you want full assurance that no one, not even Microsoft, can decrypt your data in Azure Cognitive Search. Once you have encrypted your data, it will always remain encrypted. Azure Cognitive @@ -518,14 +518,14 @@ class SearchIndex(msrest.serialization.Model): needed if you want to rotate your encryption key; Your data will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. - :type encryption_key: ~azure.search.documents.indexes.models.SearchResourceEncryptionKey - :param similarity: The type of similarity algorithm to be used when scoring and ranking the + :paramtype encryption_key: ~azure.search.documents.indexes.models.SearchResourceEncryptionKey + :keyword similarity: The type of similarity algorithm to be used when scoring and ranking the documents matching a search query. The similarity algorithm can only be defined at index creation time and cannot be modified on existing indexes. If null, the ClassicSimilarity algorithm is used. - :type similarity: ~azure.search.documents.indexes.models.SimilarityAlgorithm - :param e_tag: The ETag of the index. - :type e_tag: str + :paramtype similarity: ~azure.search.documents.indexes.models.SimilarityAlgorithm + :keyword e_tag: The ETag of the index. + :paramtype e_tag: str """ _validation = { diff --git a/sdk/search/azure-search-documents/azure/search/documents/indexes/models/_models.py b/sdk/search/azure-search-documents/azure/search/documents/indexes/models/_models.py index 36d75590f017..9a88f0e0b1b7 100644 --- a/sdk/search/azure-search-documents/azure/search/documents/indexes/models/_models.py +++ b/sdk/search/azure-search-documents/azure/search/documents/indexes/models/_models.py @@ -34,22 +34,22 @@ class SearchIndexerSkillset(_SearchIndexerSkillset): All required parameters must be populated in order to send to Azure. - :param name: Required. The name of the skillset. - :type name: str - :param description: The description of the skillset. - :type description: str - :param skills: Required. A list of skills in the skillset. - :type skills: list[~azure.search.documents.indexes.models.SearchIndexerSkill] - :param cognitive_services_account: Details about cognitive services to be used when running + :keyword name: Required. The name of the skillset. + :paramtype name: str + :keyword description: The description of the skillset. + :paramtype description: str + :keyword skills: Required. A list of skills in the skillset. + :paramtype skills: list[~azure.search.documents.indexes.models.SearchIndexerSkill] + :keyword cognitive_services_account: Details about cognitive services to be used when running skills. 
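`SearchIndex` is likewise constructed from the keywords documented above and can then be passed to `SearchIndexClient.create_index`. A minimal sketch with a hypothetical index name and CORS settings:

```py
from azure.search.documents.indexes.models import (
    CorsOptions,
    SearchableField,
    SearchFieldDataType,
    SearchIndex,
    SimpleField,
)

index = SearchIndex(
    name="hotels-index",  # hypothetical
    fields=[
        SimpleField(name="hotel_id", type=SearchFieldDataType.String, key=True),
        SearchableField(name="description", analyzer_name="en.lucene"),
    ],
    cors_options=CorsOptions(allowed_origins=["*"], max_age_in_seconds=60),
)
```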
- :type cognitive_services_account: + :paramtype cognitive_services_account: ~azure.search.documents.indexes.models.CognitiveServicesAccount - :param knowledge_store: Definition of additional projections to azure blob, table, or files, of + :keyword knowledge_store: Definition of additional projections to azure blob, table, or files, of enriched data. - :type knowledge_store: ~azure.search.documents.indexes.models.SearchIndexerKnowledgeStore - :param e_tag: The ETag of the skillset. - :type e_tag: str - :param encryption_key: A description of an encryption key that you create in Azure Key Vault. + :paramtype knowledge_store: ~azure.search.documents.indexes.models.SearchIndexerKnowledgeStore + :keyword e_tag: The ETag of the skillset. + :paramtype e_tag: str + :keyword encryption_key: A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your skillset definition when you want full assurance that no one, not even Microsoft, can decrypt your skillset definition in Azure Cognitive Search. Once you have encrypted your skillset @@ -58,7 +58,7 @@ class SearchIndexerSkillset(_SearchIndexerSkillset): encryption key; Your skillset definition will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. - :type encryption_key: ~azure.search.documents.indexes.models.SearchResourceEncryptionKey + :paramtype encryption_key: ~azure.search.documents.indexes.models.SearchResourceEncryptionKey """ def __init__( @@ -118,48 +118,48 @@ class EntityRecognitionSkill(SearchIndexerSkill): All required parameters must be populated in order to send to Azure. - :param odata_type: Required. Identifies the concrete type of the skill.Constant filled by + :keyword odata_type: Required. Identifies the concrete type of the skill.Constant filled by server. - :type odata_type: str - :param name: The name of the skill which uniquely identifies it within the skillset. A skill + :paramtype odata_type: str + :keyword name: The name of the skill which uniquely identifies it within the skillset. A skill with no name defined will be given a default name of its 1-based index in the skills array, prefixed with the character '#'. - :type name: str - :param description: The description of the skill which describes the inputs, outputs, and usage + :paramtype name: str + :keyword description: The description of the skill which describes the inputs, outputs, and usage of the skill. - :type description: str - :param context: Represents the level at which operations take place, such as the document root + :paramtype description: str + :keyword context: Represents the level at which operations take place, such as the document root or document content (for example, /document or /document/content). The default is /document. - :type context: str - :param inputs: Required. Inputs of the skills could be a column in the source data set, or the + :paramtype context: str + :keyword inputs: Required. Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - :type inputs: list[~azure.search.documents.indexes.models.InputFieldMappingEntry] - :param outputs: Required. The output of a skill is either a field in a search index, or a value + :paramtype inputs: list[~azure.search.documents.indexes.models.InputFieldMappingEntry] + :keyword outputs: Required. 
The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - :type outputs: list[~azure.search.documents.indexes.models.OutputFieldMappingEntry] - :param categories: A list of entity categories that should be extracted. - :type categories: list[str or ~azure.search.documents.indexes.models.EntityCategory] - :param default_language_code: A value indicating which language code to use. Default is en. + :paramtype outputs: list[~azure.search.documents.indexes.models.OutputFieldMappingEntry] + :keyword categories: A list of entity categories that should be extracted. + :paramtype categories: list[str or ~azure.search.documents.indexes.models.EntityCategory] + :keyword default_language_code: A value indicating which language code to use. Default is en. Possible values include: "ar", "cs", "zh-Hans", "zh-Hant", "da", "nl", "en", "fi", "fr", "de", "el", "hu", "it", "ja", "ko", "no", "pl", "pt-PT", "pt-BR", "ru", "es", "sv", "tr". - :type default_language_code: str or + :paramtype default_language_code: str or ~azure.search.documents.indexes.models.EntityRecognitionSkillLanguage - :param include_typeless_entities: Determines whether or not to include entities which are well + :keyword include_typeless_entities: Determines whether or not to include entities which are well known but don't conform to a pre-defined type. If this configuration is not set (default), set to null or set to false, entities which don't conform to one of the pre-defined types will not be surfaced. Only valid for skill version 1. - :type include_typeless_entities: bool - :param minimum_precision: A value between 0 and 1 that be used to only include entities whose + :paramtype include_typeless_entities: bool + :keyword minimum_precision: A value between 0 and 1 that be used to only include entities whose confidence score is greater than the value specified. If not set (default), or if explicitly set to null, all entities will be included. - :type minimum_precision: float - :param model_version: The version of the model to use when calling the Text Analytics service. + :paramtype minimum_precision: float + :keyword model_version: The version of the model to use when calling the Text Analytics service. It will default to the latest available when not specified. We recommend you do not specify this value unless absolutely necessary. Only valid from skill version 3. - :type model_version: str - :param skill_version: The version of the skill to use when calling the Text Analytics service. + :paramtype model_version: str + :keyword skill_version: The version of the skill to use when calling the Text Analytics service. It will default to V1 when not specified. 
- :type skill_version: ~azure.search.documents.indexes.models.EntityRecognitionSkillVersion + :paramtype skill_version: ~azure.search.documents.indexes.models.EntityRecognitionSkillVersion """ _validation = { @@ -181,6 +181,7 @@ class EntityRecognitionSkill(SearchIndexerSkill): 'include_typeless_entities': {'key': 'includeTypelessEntities', 'type': 'bool'}, 'minimum_precision': {'key': 'minimumPrecision', 'type': 'float'}, 'model_version': {'key': 'modelVersion', 'type': 'str'}, + 'skill_version': {'key': 'skillVersion', 'type': 'str'} } def __init__( @@ -210,8 +211,7 @@ def _to_generated(self): categories=self.categories, default_language_code=self.default_language_code, include_typeless_entities=self.include_typeless_entities, - minimum_precision=self.minimum_precision, - model_version=self.model_version + minimum_precision=self.minimum_precision ) if self.skill_version in [EntityRecognitionSkillVersion.V3, EntityRecognitionSkillVersion.LATEST]: return _EntityRecognitionSkillV3( @@ -221,7 +221,6 @@ def _to_generated(self): odata_type=self.odata_type, categories=self.categories, default_language_code=self.default_language_code, - include_typeless_entities=self.include_typeless_entities, minimum_precision=self.minimum_precision, model_version=self.model_version ) @@ -265,41 +264,41 @@ class SentimentSkill(SearchIndexerSkill): All required parameters must be populated in order to send to Azure. - :param odata_type: Required. Identifies the concrete type of the skill.Constant filled by + :keyword odata_type: Required. Identifies the concrete type of the skill.Constant filled by server. - :type odata_type: str - :param name: The name of the skill which uniquely identifies it within the skillset. A skill + :paramtype odata_type: str + :keyword name: The name of the skill which uniquely identifies it within the skillset. A skill with no name defined will be given a default name of its 1-based index in the skills array, prefixed with the character '#'. - :type name: str - :param description: The description of the skill which describes the inputs, outputs, and usage + :paramtype name: str + :keyword description: The description of the skill which describes the inputs, outputs, and usage of the skill. - :type description: str - :param context: Represents the level at which operations take place, such as the document root + :paramtype description: str + :keyword context: Represents the level at which operations take place, such as the document root or document content (for example, /document or /document/content). The default is /document. - :type context: str - :param inputs: Required. Inputs of the skills could be a column in the source data set, or the + :paramtype context: str + :keyword inputs: Required. Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - :type inputs: list[~azure.search.documents.indexes.models.InputFieldMappingEntry] - :param outputs: Required. The output of a skill is either a field in a search index, or a value + :paramtype inputs: list[~azure.search.documents.indexes.models.InputFieldMappingEntry] + :keyword outputs: Required. The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - :type outputs: list[~azure.search.documents.indexes.models.OutputFieldMappingEntry] - :param default_language_code: A value indicating which language code to use. Default is en. 
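The new `skill_version` keyword (added to the attribute map above) selects between the V1 and V3 entity recognition skills. A sketch of building a skillset with it; the skillset name, categories, and input/output mappings are illustrative:

```py
from azure.search.documents.indexes.models import (
    EntityRecognitionSkill,
    EntityRecognitionSkillVersion,
    InputFieldMappingEntry,
    OutputFieldMappingEntry,
    SearchIndexerSkillset,
)

skill = EntityRecognitionSkill(
    inputs=[InputFieldMappingEntry(name="text", source="/document/content")],
    outputs=[OutputFieldMappingEntry(name="organizations", target_name="organizations")],
    categories=["Organization"],
    skill_version=EntityRecognitionSkillVersion.V3,  # V1 is used when omitted
)

skillset = SearchIndexerSkillset(
    name="example-skillset",  # illustrative name
    description="entity recognition example",
    skills=[skill],
)
```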
+ :paramtype outputs: list[~azure.search.documents.indexes.models.OutputFieldMappingEntry] + :keyword default_language_code: A value indicating which language code to use. Default is en. Possible values include: "da", "nl", "en", "fi", "fr", "de", "el", "it", "no", "pl", "pt-PT", "ru", "es", "sv", "tr". - :type default_language_code: str or + :paramtype default_language_code: str or ~azure.search.documents.indexes.models.SentimentSkillLanguage - :param include_opinion_mining: If set to true, the skill output will include information from + :keyword include_opinion_mining: If set to true, the skill output will include information from Text Analytics for opinion mining, namely targets (nouns or verbs) and their associated assessment (adjective) in the text. Default is false. - :type include_opinion_mining: bool - :param model_version: The version of the model to use when calling the Text Analytics service. + :paramtype include_opinion_mining: bool + :keyword model_version: The version of the model to use when calling the Text Analytics service. It will default to the latest available when not specified. We recommend you do not specify this value unless absolutely necessary. - :type model_version: str - :param skill_version: The version of the skill to use when calling the Text Analytics service. + :paramtype model_version: str + :keyword skill_version: The version of the skill to use when calling the Text Analytics service. It will default to V1 when not specified. - :type skill_version: ~azure.search.documents.indexes.models.SentimentSkillVersion + :paramtype skill_version: ~azure.search.documents.indexes.models.SentimentSkillVersion """ _validation = { @@ -318,6 +317,7 @@ class SentimentSkill(SearchIndexerSkill): 'default_language_code': {'key': 'defaultLanguageCode', 'type': 'str'}, 'include_opinion_mining': {'key': 'includeOpinionMining', 'type': 'bool'}, 'model_version': {'key': 'modelVersion', 'type': 'str'}, + 'skill_version': {'key': 'skillVersion', 'type': 'str'} } def __init__( @@ -331,7 +331,7 @@ def __init__( self.skill_version = skill_version self.odata_type = self.skill_version # type: str self.default_language_code = kwargs.get('default_language_code', None) - self.include_opinion_mining = kwargs.get('include_opinion_mining', False) + self.include_opinion_mining = kwargs.get('include_opinion_mining', None) self.model_version = kwargs.get('model_version', None) def _to_generated(self): @@ -341,10 +341,8 @@ def _to_generated(self): outputs=self.outputs, name=self.name, odata_type=self.odata_type, - default_language_code=self.default_language_code, - include_opinion_mining=self.include_opinion_mining, - model_version=self.model_version - ) + default_language_code=self.default_language_code + ) if self.skill_version in [SentimentSkillVersion.V3, SentimentSkillVersion.LATEST]: return _SentimentSkillV3( inputs=self.inputs, @@ -380,9 +378,9 @@ class AnalyzeTextOptions(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param text: Required. The text to break into tokens. - :type text: str - :param analyzer_name: The name of the analyzer to use to break the given text. If this parameter is + :keyword text: Required. The text to break into tokens. + :paramtype text: str + :keyword analyzer_name: The name of the analyzer to use to break the given text. If this parameter is not specified, you must specify a tokenizer instead. The tokenizer and analyzer parameters are mutually exclusive. 
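`SentimentSkill` gains the same `skill_version` keyword; opinion mining is only honoured by the V3 skill. An illustrative sketch with hypothetical input/output mappings:

```py
from azure.search.documents.indexes.models import (
    InputFieldMappingEntry,
    OutputFieldMappingEntry,
    SentimentSkill,
    SentimentSkillVersion,
)

sentiment = SentimentSkill(
    inputs=[InputFieldMappingEntry(name="text", source="/document/content")],
    outputs=[OutputFieldMappingEntry(name="sentiment", target_name="sentiment")],
    skill_version=SentimentSkillVersion.V3,
    include_opinion_mining=True,  # only meaningful for the V3 skill
)
```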
Possible values include: "ar.microsoft", "ar.lucene", "hy.lucene", "bn.microsoft", "eu.lucene", "bg.microsoft", "bg.lucene", "ca.microsoft", "ca.lucene", "zh- @@ -401,22 +399,22 @@ class AnalyzeTextOptions(msrest.serialization.Model): "th.microsoft", "th.lucene", "tr.microsoft", "tr.lucene", "uk.microsoft", "ur.microsoft", "vi.microsoft", "standard.lucene", "standardasciifolding.lucene", "keyword", "pattern", "simple", "stop", "whitespace". - :type analyzer_name: str or ~azure.search.documents.indexes.models.LexicalAnalyzerName - :param tokenizer_name: The name of the tokenizer to use to break the given text. If this parameter + :paramtype analyzer_name: str or ~azure.search.documents.indexes.models.LexicalAnalyzerName + :keyword tokenizer_name: The name of the tokenizer to use to break the given text. If this parameter is not specified, you must specify an analyzer instead. The tokenizer and analyzer parameters are mutually exclusive. Possible values include: "classic", "edgeNGram", "keyword_v2", "letter", "lowercase", "microsoft_language_tokenizer", "microsoft_language_stemming_tokenizer", "nGram", "path_hierarchy_v2", "pattern", "standard_v2", "uax_url_email", "whitespace". - :type tokenizer_name: str or ~azure.search.documents.indexes.models.LexicalTokenizerName - :param normalizer_name: The name of the normalizer to use to normalize the given text. Possible + :paramtype tokenizer_name: str or ~azure.search.documents.indexes.models.LexicalTokenizerName + :keyword normalizer_name: The name of the normalizer to use to normalize the given text. Possible values include: "asciifolding", "elision", "lowercase", "standard", "uppercase". - :type normalizer_name: str or ~azure.search.documents.indexes.models.LexicalNormalizerName - :param token_filters: An optional list of token filters to use when breaking the given text. + :paramtype normalizer_name: str or ~azure.search.documents.indexes.models.LexicalNormalizerName + :keyword token_filters: An optional list of token filters to use when breaking the given text. This parameter can only be set when using the tokenizer parameter. - :type token_filters: list[str or ~azure.search.documents.indexes.models.TokenFilterName] - :param char_filters: An optional list of character filters to use when breaking the given text. + :paramtype token_filters: list[str or ~azure.search.documents.indexes.models.TokenFilterName] + :keyword char_filters: An optional list of character filters to use when breaking the given text. This parameter can only be set when using the tokenizer parameter. - :type char_filters: list[str] + :paramtype char_filters: list[str] """ _validation = { @@ -460,27 +458,27 @@ class CustomAnalyzer(LexicalAnalyzer): All required parameters must be populated in order to send to Azure. - :param odata_type: Required. Identifies the concrete type of the analyzer.Constant filled by + :keyword odata_type: Required. Identifies the concrete type of the analyzer.Constant filled by server. - :type odata_type: str - :param name: Required. The name of the analyzer. It must only contain letters, digits, spaces, + :paramtype odata_type: str + :keyword name: Required. The name of the analyzer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - :type name: str - :param tokenizer_name: Required. The name of the tokenizer to use to divide continuous text into a + :paramtype name: str + :keyword tokenizer_name: Required. 
The name of the tokenizer to use to divide continuous text into a sequence of tokens, such as breaking a sentence into words. Possible values include: "classic", "edgeNGram", "keyword_v2", "letter", "lowercase", "microsoft_language_tokenizer", "microsoft_language_stemming_tokenizer", "nGram", "path_hierarchy_v2", "pattern", "standard_v2", "uax_url_email", "whitespace". - :type tokenizer_name: str or ~azure.search.documents.indexes.models.LexicalTokenizerName - :param token_filters: A list of token filters used to filter out or modify the tokens generated + :paramtype tokenizer_name: str or ~azure.search.documents.indexes.models.LexicalTokenizerName + :keyword token_filters: A list of token filters used to filter out or modify the tokens generated by a tokenizer. For example, you can specify a lowercase filter that converts all characters to lowercase. The filters are run in the order in which they are listed. - :type token_filters: list[str or ~azure.search.documents.indexes.models.TokenFilterName] - :param char_filters: A list of character filters used to prepare input text before it is + :paramtype token_filters: list[str or ~azure.search.documents.indexes.models.TokenFilterName] + :keyword char_filters: A list of character filters used to prepare input text before it is processed by the tokenizer. For instance, they can replace certain characters or symbols. The filters are run in the order in which they are listed. - :type char_filters: list[str] + :paramtype char_filters: list[str] """ _validation = { @@ -532,21 +530,21 @@ class PatternAnalyzer(LexicalAnalyzer): All required parameters must be populated in order to send to Azure. - :param name: Required. The name of the analyzer. It must only contain letters, digits, spaces, + :keyword name: Required. The name of the analyzer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - :type name: str - :param lower_case_terms: A value indicating whether terms should be lower-cased. Default is + :paramtype name: str + :keyword lower_case_terms: A value indicating whether terms should be lower-cased. Default is true. - :type lower_case_terms: bool - :param pattern: A regular expression to match token separators. Default is an + :paramtype lower_case_terms: bool + :keyword pattern: A regular expression to match token separators. Default is an expression that matches one or more white space characters. - :type pattern: str - :param flags: List of regular expression flags. Possible values of each flag include: 'CANON_EQ', + :paramtype pattern: str + :keyword flags: List of regular expression flags. Possible values of each flag include: 'CANON_EQ', 'CASE_INSENSITIVE', 'COMMENTS', 'DOTALL', 'LITERAL', 'MULTILINE', 'UNICODE_CASE', 'UNIX_LINES'. - :type flags: list[str] or list[~search_service_client.models.RegexFlags] - :param stopwords: A list of stopwords. - :type stopwords: list[str] + :paramtype flags: list[str] or list[~search_service_client.models.RegexFlags] + :keyword stopwords: A list of stopwords. + :paramtype stopwords: list[str] """ _validation = {"odata_type": {"required": True}, "name": {"required": True}} @@ -604,20 +602,20 @@ class PatternTokenizer(LexicalTokenizer): All required parameters must be populated in order to send to Azure. - :param name: Required. The name of the tokenizer. It must only contain letters, digits, spaces, + :keyword name: Required. The name of the tokenizer. 
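`CustomAnalyzer` composes a tokenizer with optional token and character filters, all supplied as keywords. An illustrative sketch using built-in tokenizer and filter names:

```py
from azure.search.documents.indexes.models import CustomAnalyzer

analyzer = CustomAnalyzer(
    name="my_custom_analyzer",  # hypothetical analyzer name
    tokenizer_name="standard_v2",
    token_filters=["lowercase", "asciifolding"],  # run in the order listed
)
```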
It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - :type name: str - :param pattern: A regular expression to match token separators. Default is an + :paramtype name: str + :keyword pattern: A regular expression to match token separators. Default is an expression that matches one or more white space characters. - :type pattern: str - :param flags: List of regular expression flags. Possible values of each flag include: 'CANON_EQ', + :paramtype pattern: str + :keyword flags: List of regular expression flags. Possible values of each flag include: 'CANON_EQ', 'CASE_INSENSITIVE', 'COMMENTS', 'DOTALL', 'LITERAL', 'MULTILINE', 'UNICODE_CASE', 'UNIX_LINES'. - :type flags: list[str] or list[~search_service_client.models.RegexFlags] - :param group: The zero-based ordinal of the matching group in the regular expression to + :paramtype flags: list[str] or list[~search_service_client.models.RegexFlags] + :keyword group: The zero-based ordinal of the matching group in the regular expression to extract into tokens. Use -1 if you want to use the entire pattern to split the input into tokens, irrespective of matching groups. Default is -1. - :type group: int + :paramtype group: int """ _validation = {"odata_type": {"required": True}, "name": {"required": True}} @@ -671,22 +669,22 @@ class SearchResourceEncryptionKey(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param key_name: Required. The name of your Azure Key Vault key to be used to encrypt your data + :keyword key_name: Required. The name of your Azure Key Vault key to be used to encrypt your data at rest. - :type key_name: str - :param key_version: Required. The version of your Azure Key Vault key to be used to encrypt + :paramtype key_name: str + :keyword key_version: Required. The version of your Azure Key Vault key to be used to encrypt your data at rest. - :type key_version: str - :param vault_uri: Required. The URI of your Azure Key Vault, also referred to as DNS name, that + :paramtype key_version: str + :keyword vault_uri: Required. The URI of your Azure Key Vault, also referred to as DNS name, that contains the key to be used to encrypt your data at rest. An example URI might be https://my- keyvault-name.vault.azure.net. - :type vault_uri: str - :param application_id: Required. An AAD Application ID that was granted the required access + :paramtype vault_uri: str + :keyword application_id: Required. An AAD Application ID that was granted the required access permissions to the Azure Key Vault that is to be used when encrypting your data at rest. The Application ID should not be confused with the Object ID for your AAD Application. - :type application_id: str - :param application_secret: The authentication key of the specified AAD application. - :type application_secret: str + :paramtype application_id: str + :keyword application_secret: The authentication key of the specified AAD application. + :paramtype application_secret: str """ _validation = { @@ -756,15 +754,15 @@ class SynonymMap(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param name: Required. The name of the synonym map. - :type name: str + :keyword name: Required. The name of the synonym map. + :paramtype name: str :ivar format: Required. The format of the synonym map. Only the 'solr' format is currently supported. Default value: "solr". 
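`SearchResourceEncryptionKey` carries the customer-managed key details described above; every value in this sketch is a placeholder:

```py
from azure.search.documents.indexes.models import SearchResourceEncryptionKey

encryption_key = SearchResourceEncryptionKey(
    key_name="my-key",                                       # placeholder
    key_version="0123456789abcdef0123456789abcdef",          # placeholder
    vault_uri="https://my-keyvault-name.vault.azure.net",    # placeholder vault
    application_id="00000000-0000-0000-0000-000000000000",   # placeholder AAD app ID
    application_secret="<application-secret>",               # placeholder
)
```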
:vartype format: str - :param synonyms: Required. A series of synonym rules in the specified synonym map format. The + :keyword synonyms: Required. A series of synonym rules in the specified synonym map format. The rules must be separated by newlines. - :type synonyms: list[str] - :param encryption_key: A description of an encryption key that you create in Azure Key Vault. + :paramtype synonyms: list[str] + :keyword encryption_key: A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your data when you want full assurance that no one, not even Microsoft, can decrypt your data in Azure Cognitive Search. Once you have encrypted your data, it will always remain encrypted. Azure Cognitive @@ -772,9 +770,9 @@ class SynonymMap(msrest.serialization.Model): needed if you want to rotate your encryption key; Your data will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. - :type encryption_key: ~azure.search.documents.indexes.models.SearchResourceEncryptionKey - :param e_tag: The ETag of the synonym map. - :type e_tag: str + :paramtype encryption_key: ~azure.search.documents.indexes.models.SearchResourceEncryptionKey + :keyword e_tag: The ETag of the synonym map. + :paramtype e_tag: str """ _validation = { @@ -833,24 +831,24 @@ class SearchIndexerDataSourceConnection(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param name: Required. The name of the datasource connection. - :type name: str - :param description: The description of the datasource connection. - :type description: str - :param type: Required. The type of the datasource connection. Possible values include: "azuresql", + :keyword name: Required. The name of the datasource connection. + :paramtype name: str + :keyword description: The description of the datasource connection. + :paramtype description: str + :keyword type: Required. The type of the datasource connection. Possible values include: "azuresql", "cosmosdb", "azureblob", "azuretable", "mysql", "adlsgen2". - :type type: str or ~azure.search.documents.indexes.models.SearchIndexerDataSourceType - :param connection_string: The connection string for the datasource connection. - :type connection_string: str - :param container: Required. The data container for the datasource connection. - :type container: ~azure.search.documents.indexes.models.SearchIndexerDataContainer - :param data_change_detection_policy: The data change detection policy for the datasource connection. - :type data_change_detection_policy: ~azure.search.documents.models.DataChangeDetectionPolicy - :param data_deletion_detection_policy: The data deletion detection policy for the datasource connection. - :type data_deletion_detection_policy: + :paramtype type: str or ~azure.search.documents.indexes.models.SearchIndexerDataSourceType + :keyword connection_string: The connection string for the datasource connection. + :paramtype connection_string: str + :keyword container: Required. The data container for the datasource connection. + :paramtype container: ~azure.search.documents.indexes.models.SearchIndexerDataContainer + :keyword data_change_detection_policy: The data change detection policy for the datasource connection. 
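`SynonymMap` and `SearchIndexerDataSourceConnection` follow the same keyword pattern; the names and connection string below are placeholders:

```py
from azure.search.documents.indexes.models import (
    SearchIndexerDataContainer,
    SearchIndexerDataSourceConnection,
    SynonymMap,
)

synonym_map = SynonymMap(
    name="my-synonym-map",
    synonyms=["USA, United States, United States of America"],  # solr-format rules
)

data_source = SearchIndexerDataSourceConnection(
    name="my-blob-datasource",
    type="azureblob",
    connection_string="<storage-connection-string>",  # placeholder
    container=SearchIndexerDataContainer(name="my-container"),
)
```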
+ :paramtype data_change_detection_policy: ~azure.search.documents.models.DataChangeDetectionPolicy + :keyword data_deletion_detection_policy: The data deletion detection policy for the datasource connection. + :paramtype data_deletion_detection_policy: ~azure.search.documents.models.DataDeletionDetectionPolicy - :param e_tag: The ETag of the data source. - :type e_tag: str + :keyword e_tag: The ETag of the data source. + :paramtype e_tag: str """ _validation = { diff --git a/sdk/search/azure-search-documents/tests/recordings/test_search_index_client_skillset_live.test_create_or_update_skillset.yaml b/sdk/search/azure-search-documents/tests/recordings/test_search_index_client_skillset_live.test_create_or_update_skillset.yaml index beac00b106ed..f3fc2646770f 100644 --- a/sdk/search/azure-search-documents/tests/recordings/test_search_index_client_skillset_live.test_create_or_update_skillset.yaml +++ b/sdk/search/azure-search-documents/tests/recordings/test_search_index_client_skillset_live.test_create_or_update_skillset.yaml @@ -18,12 +18,12 @@ interactions: Prefer: - return=representation User-Agent: - - azsdk-python-search-documents/11.3.0b3 Python/3.9.2 (Windows-10-10.0.19041-SP0) + - azsdk-python-search-documents/11.3.0b4 Python/3.9.2 (Windows-10-10.0.19041-SP0) method: PUT uri: https://searche2bf1c71.search.windows.net/skillsets('test-ss')?api-version=2021-04-30-Preview response: body: - string: '{"@odata.context":"https://searche2bf1c71.search.windows.net/$metadata#skillsets/$entity","@odata.etag":"\"0x8D96E6871C89346\"","name":"test-ss","description":"desc1","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":null,"description":null,"context":null,"categories":[],"defaultLanguageCode":null,"minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}' + string: '{"@odata.context":"https://searche2bf1c71.search.windows.net/$metadata#skillsets/$entity","@odata.etag":"\"0x8D977BF242A141A\"","name":"test-ss","description":"desc1","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":null,"description":null,"context":null,"categories":[],"defaultLanguageCode":null,"minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}' headers: cache-control: - no-cache @@ -32,11 +32,11 @@ interactions: content-type: - application/json; odata.metadata=minimal date: - - Thu, 02 Sep 2021 23:21:51 GMT + - Tue, 14 Sep 2021 20:35:07 GMT elapsed-time: - - '1433' + - '2124' etag: - - W/"0x8D96E6871C89346" + - W/"0x8D977BF242A141A" expires: - '-1' location: @@ -48,7 +48,7 @@ interactions: preference-applied: - odata.include-annotations="*" request-id: - - 8ded272a-0c44-11ec-ac43-74c63bed1137 + - 3ff59952-159b-11ec-bda5-74c63bed1137 strict-transport-security: - max-age=15724800; includeSubDomains status: @@ -73,12 +73,12 @@ interactions: Prefer: - return=representation User-Agent: - - azsdk-python-search-documents/11.3.0b3 Python/3.9.2 (Windows-10-10.0.19041-SP0) + - azsdk-python-search-documents/11.3.0b4 Python/3.9.2 (Windows-10-10.0.19041-SP0) method: PUT uri: 
https://searche2bf1c71.search.windows.net/skillsets('test-ss')?api-version=2021-04-30-Preview response: body: - string: '{"@odata.context":"https://searche2bf1c71.search.windows.net/$metadata#skillsets/$entity","@odata.etag":"\"0x8D96E6871DB3571\"","name":"test-ss","description":"desc2","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":null,"description":null,"context":null,"categories":[],"defaultLanguageCode":null,"minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}' + string: '{"@odata.context":"https://searche2bf1c71.search.windows.net/$metadata#skillsets/$entity","@odata.etag":"\"0x8D977BF243E8A49\"","name":"test-ss","description":"desc2","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":null,"description":null,"context":null,"categories":[],"defaultLanguageCode":null,"minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}' headers: cache-control: - no-cache @@ -87,11 +87,11 @@ interactions: content-type: - application/json; odata.metadata=minimal date: - - Thu, 02 Sep 2021 23:21:51 GMT + - Tue, 14 Sep 2021 20:35:07 GMT elapsed-time: - - '46' + - '85' etag: - - W/"0x8D96E6871DB3571" + - W/"0x8D977BF243E8A49" expires: - '-1' odata-version: @@ -101,7 +101,7 @@ interactions: preference-applied: - odata.include-annotations="*" request-id: - - 8ef2919c-0c44-11ec-a88c-74c63bed1137 + - 41558274-159b-11ec-bc0f-74c63bed1137 strict-transport-security: - max-age=15724800; includeSubDomains vary: @@ -119,12 +119,12 @@ interactions: Connection: - keep-alive User-Agent: - - azsdk-python-search-documents/11.3.0b3 Python/3.9.2 (Windows-10-10.0.19041-SP0) + - azsdk-python-search-documents/11.3.0b4 Python/3.9.2 (Windows-10-10.0.19041-SP0) method: GET uri: https://searche2bf1c71.search.windows.net/skillsets?api-version=2021-04-30-Preview response: body: - string: '{"@odata.context":"https://searche2bf1c71.search.windows.net/$metadata#skillsets","value":[{"@odata.etag":"\"0x8D96E6871DB3571\"","name":"test-ss","description":"desc2","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":"#1","description":null,"context":"/document","categories":["Person","Quantity","Organization","URL","Email","Location","DateTime"],"defaultLanguageCode":"en","minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}]}' + string: 
'{"@odata.context":"https://searche2bf1c71.search.windows.net/$metadata#skillsets","value":[{"@odata.etag":"\"0x8D977BF243E8A49\"","name":"test-ss","description":"desc2","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":"#1","description":null,"context":"/document","categories":["Person","Quantity","Organization","URL","Email","Location","DateTime"],"defaultLanguageCode":"en","minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}]}' headers: cache-control: - no-cache @@ -133,9 +133,9 @@ interactions: content-type: - application/json; odata.metadata=minimal date: - - Thu, 02 Sep 2021 23:21:51 GMT + - Tue, 14 Sep 2021 20:35:07 GMT elapsed-time: - - '139' + - '69' expires: - '-1' odata-version: @@ -145,7 +145,7 @@ interactions: preference-applied: - odata.include-annotations="*" request-id: - - 8f03c3cb-0c44-11ec-94e8-74c63bed1137 + - 4169c877-159b-11ec-9f5a-74c63bed1137 strict-transport-security: - max-age=15724800; includeSubDomains vary: @@ -163,12 +163,12 @@ interactions: Connection: - keep-alive User-Agent: - - azsdk-python-search-documents/11.3.0b3 Python/3.9.2 (Windows-10-10.0.19041-SP0) + - azsdk-python-search-documents/11.3.0b4 Python/3.9.2 (Windows-10-10.0.19041-SP0) method: GET uri: https://searche2bf1c71.search.windows.net/skillsets('test-ss')?api-version=2021-04-30-Preview response: body: - string: '{"@odata.context":"https://searche2bf1c71.search.windows.net/$metadata#skillsets/$entity","@odata.etag":"\"0x8D96E6871DB3571\"","name":"test-ss","description":"desc2","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":"#1","description":null,"context":"/document","categories":["Person","Quantity","Organization","URL","Email","Location","DateTime"],"defaultLanguageCode":"en","minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}' + string: '{"@odata.context":"https://searche2bf1c71.search.windows.net/$metadata#skillsets/$entity","@odata.etag":"\"0x8D977BF243E8A49\"","name":"test-ss","description":"desc2","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":"#1","description":null,"context":"/document","categories":["Person","Quantity","Organization","URL","Email","Location","DateTime"],"defaultLanguageCode":"en","minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}' headers: cache-control: - no-cache @@ -177,11 +177,11 @@ interactions: content-type: - application/json; odata.metadata=minimal date: - - Thu, 02 Sep 2021 23:21:51 GMT + - Tue, 14 Sep 2021 20:35:07 GMT elapsed-time: - - '13' + - '15' etag: - - W/"0x8D96E6871DB3571" + - W/"0x8D977BF243E8A49" expires: - '-1' odata-version: @@ -191,7 +191,7 @@ interactions: preference-applied: - odata.include-annotations="*" request-id: - - 8f22fc82-0c44-11ec-8c37-74c63bed1137 + - 417b5016-159b-11ec-acf0-74c63bed1137 strict-transport-security: - max-age=15724800; includeSubDomains vary: diff 
--git a/sdk/search/azure-search-documents/tests/recordings/test_search_index_client_skillset_live.test_create_or_update_skillset_if_unchanged.yaml b/sdk/search/azure-search-documents/tests/recordings/test_search_index_client_skillset_live.test_create_or_update_skillset_if_unchanged.yaml index 13b7ebb493f8..7f3fecf24c0b 100644 --- a/sdk/search/azure-search-documents/tests/recordings/test_search_index_client_skillset_live.test_create_or_update_skillset_if_unchanged.yaml +++ b/sdk/search/azure-search-documents/tests/recordings/test_search_index_client_skillset_live.test_create_or_update_skillset_if_unchanged.yaml @@ -18,12 +18,12 @@ interactions: Prefer: - return=representation User-Agent: - - azsdk-python-search-documents/11.3.0b3 Python/3.9.2 (Windows-10-10.0.19041-SP0) + - azsdk-python-search-documents/11.3.0b4 Python/3.9.2 (Windows-10-10.0.19041-SP0) method: PUT uri: https://search792321ab.search.windows.net/skillsets('test-ss')?api-version=2021-04-30-Preview response: body: - string: '{"@odata.context":"https://search792321ab.search.windows.net/$metadata#skillsets/$entity","@odata.etag":"\"0x8D96E687D620A16\"","name":"test-ss","description":"desc1","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":null,"description":null,"context":null,"categories":[],"defaultLanguageCode":null,"minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}' + string: '{"@odata.context":"https://search792321ab.search.windows.net/$metadata#skillsets/$entity","@odata.etag":"\"0x8D977CB70C7008A\"","name":"test-ss","description":"desc1","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":null,"description":null,"context":null,"categories":[],"defaultLanguageCode":null,"minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}' headers: cache-control: - no-cache @@ -32,11 +32,11 @@ interactions: content-type: - application/json; odata.metadata=minimal date: - - Thu, 02 Sep 2021 23:22:10 GMT + - Tue, 14 Sep 2021 22:03:10 GMT elapsed-time: - - '1575' + - '2131' etag: - - W/"0x8D96E687D620A16" + - W/"0x8D977CB70C7008A" expires: - '-1' location: @@ -48,7 +48,7 @@ interactions: preference-applied: - odata.include-annotations="*" request-id: - - 9973fe7a-0c44-11ec-88b9-74c63bed1137 + - 8ca7995e-15a7-11ec-aa34-74c63bed1137 strict-transport-security: - max-age=15724800; includeSubDomains status: @@ -73,12 +73,12 @@ interactions: Prefer: - return=representation User-Agent: - - azsdk-python-search-documents/11.3.0b3 Python/3.9.2 (Windows-10-10.0.19041-SP0) + - azsdk-python-search-documents/11.3.0b4 Python/3.9.2 (Windows-10-10.0.19041-SP0) method: PUT uri: https://search792321ab.search.windows.net/skillsets('test-ss')?api-version=2021-04-30-Preview response: body: - string: 
'{"@odata.context":"https://search792321ab.search.windows.net/$metadata#skillsets/$entity","@odata.etag":"\"0x8D96E687D79414B\"","name":"test-ss","description":"desc2","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":null,"description":null,"context":null,"categories":[],"defaultLanguageCode":null,"minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}' + string: '{"@odata.context":"https://search792321ab.search.windows.net/$metadata#skillsets/$entity","@odata.etag":"\"0x8D977CB70D7F380\"","name":"test-ss","description":"desc2","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":null,"description":null,"context":null,"categories":[],"defaultLanguageCode":null,"minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}' headers: cache-control: - no-cache @@ -87,11 +87,11 @@ interactions: content-type: - application/json; odata.metadata=minimal date: - - Thu, 02 Sep 2021 23:22:10 GMT + - Tue, 14 Sep 2021 22:03:10 GMT elapsed-time: - - '55' + - '61' etag: - - W/"0x8D96E687D79414B" + - W/"0x8D977CB70D7F380" expires: - '-1' odata-version: @@ -101,7 +101,7 @@ interactions: preference-applied: - odata.include-annotations="*" request-id: - - 9a8fb59e-0c44-11ec-b124-74c63bed1137 + - 8e04f88a-15a7-11ec-8243-74c63bed1137 strict-transport-security: - max-age=15724800; includeSubDomains vary: @@ -119,12 +119,12 @@ interactions: Connection: - keep-alive User-Agent: - - azsdk-python-search-documents/11.3.0b3 Python/3.9.2 (Windows-10-10.0.19041-SP0) + - azsdk-python-search-documents/11.3.0b4 Python/3.9.2 (Windows-10-10.0.19041-SP0) method: GET uri: https://search792321ab.search.windows.net/skillsets?api-version=2021-04-30-Preview response: body: - string: '{"@odata.context":"https://search792321ab.search.windows.net/$metadata#skillsets","value":[{"@odata.etag":"\"0x8D96E687D79414B\"","name":"test-ss","description":"desc2","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":"#1","description":null,"context":"/document","categories":["Person","Quantity","Organization","URL","Email","Location","DateTime"],"defaultLanguageCode":"en","minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}]}' + string: 
'{"@odata.context":"https://search792321ab.search.windows.net/$metadata#skillsets","value":[{"@odata.etag":"\"0x8D977CB70D7F380\"","name":"test-ss","description":"desc2","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":"#1","description":null,"context":"/document","categories":["Person","Quantity","Organization","URL","Email","Location","DateTime"],"defaultLanguageCode":"en","minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}]}' headers: cache-control: - no-cache @@ -133,9 +133,9 @@ interactions: content-type: - application/json; odata.metadata=minimal date: - - Thu, 02 Sep 2021 23:22:10 GMT + - Tue, 14 Sep 2021 22:03:10 GMT elapsed-time: - - '38' + - '75' expires: - '-1' odata-version: @@ -145,7 +145,7 @@ interactions: preference-applied: - odata.include-annotations="*" request-id: - - 9aa208dc-0c44-11ec-90cd-74c63bed1137 + - 8e159a1f-15a7-11ec-bf98-74c63bed1137 strict-transport-security: - max-age=15724800; includeSubDomains vary: diff --git a/sdk/search/azure-search-documents/tests/recordings/test_search_index_client_skillset_live.test_create_or_update_skillset_inplace.yaml b/sdk/search/azure-search-documents/tests/recordings/test_search_index_client_skillset_live.test_create_or_update_skillset_inplace.yaml index ac139c653307..1fd4894cba5c 100644 --- a/sdk/search/azure-search-documents/tests/recordings/test_search_index_client_skillset_live.test_create_or_update_skillset_inplace.yaml +++ b/sdk/search/azure-search-documents/tests/recordings/test_search_index_client_skillset_live.test_create_or_update_skillset_inplace.yaml @@ -18,12 +18,12 @@ interactions: Prefer: - return=representation User-Agent: - - azsdk-python-search-documents/11.3.0b3 Python/3.9.2 (Windows-10-10.0.19041-SP0) + - azsdk-python-search-documents/11.3.0b4 Python/3.9.2 (Windows-10-10.0.19041-SP0) method: PUT uri: https://searchd4ef1fac.search.windows.net/skillsets('test-ss')?api-version=2021-04-30-Preview response: body: - string: '{"@odata.context":"https://searchd4ef1fac.search.windows.net/$metadata#skillsets/$entity","@odata.etag":"\"0x8D96E6887F54272\"","name":"test-ss","description":"desc1","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":null,"description":null,"context":null,"categories":[],"defaultLanguageCode":null,"minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}' + string: '{"@odata.context":"https://searchd4ef1fac.search.windows.net/$metadata#skillsets/$entity","@odata.etag":"\"0x8D977C87A42B270\"","name":"test-ss","description":"desc1","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":null,"description":null,"context":null,"categories":[],"defaultLanguageCode":null,"minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}' headers: cache-control: - no-cache @@ -32,11 +32,11 @@ interactions: content-type: - application/json; odata.metadata=minimal 
date: - - Thu, 02 Sep 2021 23:22:28 GMT + - Tue, 14 Sep 2021 21:41:57 GMT elapsed-time: - - '1212' + - '1916' etag: - - W/"0x8D96E6887F54272" + - W/"0x8D977C87A42B270" expires: - '-1' location: @@ -48,7 +48,7 @@ interactions: preference-applied: - odata.include-annotations="*" request-id: - - a4424b77-0c44-11ec-881a-74c63bed1137 + - 963d1c9d-15a4-11ec-9672-74c63bed1137 strict-transport-security: - max-age=15724800; includeSubDomains status: @@ -73,12 +73,12 @@ interactions: Prefer: - return=representation User-Agent: - - azsdk-python-search-documents/11.3.0b3 Python/3.9.2 (Windows-10-10.0.19041-SP0) + - azsdk-python-search-documents/11.3.0b4 Python/3.9.2 (Windows-10-10.0.19041-SP0) method: PUT uri: https://searchd4ef1fac.search.windows.net/skillsets('test-ss')?api-version=2021-04-30-Preview response: body: - string: '{"@odata.context":"https://searchd4ef1fac.search.windows.net/$metadata#skillsets/$entity","@odata.etag":"\"0x8D96E6888074840\"","name":"test-ss","description":"desc2","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":null,"description":null,"context":null,"categories":[],"defaultLanguageCode":null,"minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}' + string: '{"@odata.context":"https://searchd4ef1fac.search.windows.net/$metadata#skillsets/$entity","@odata.etag":"\"0x8D977C87A5B209B\"","name":"test-ss","description":"desc2","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":null,"description":null,"context":null,"categories":[],"defaultLanguageCode":null,"minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}' headers: cache-control: - no-cache @@ -87,11 +87,11 @@ interactions: content-type: - application/json; odata.metadata=minimal date: - - Thu, 02 Sep 2021 23:22:28 GMT + - Tue, 14 Sep 2021 21:41:57 GMT elapsed-time: - - '47' + - '79' etag: - - W/"0x8D96E6888074840" + - W/"0x8D977C87A5B209B" expires: - '-1' odata-version: @@ -101,7 +101,7 @@ interactions: preference-applied: - odata.include-annotations="*" request-id: - - a51fabef-0c44-11ec-b060-74c63bed1137 + - 977f2425-15a4-11ec-ac05-74c63bed1137 strict-transport-security: - max-age=15724800; includeSubDomains vary: @@ -119,12 +119,12 @@ interactions: Connection: - keep-alive User-Agent: - - azsdk-python-search-documents/11.3.0b3 Python/3.9.2 (Windows-10-10.0.19041-SP0) + - azsdk-python-search-documents/11.3.0b4 Python/3.9.2 (Windows-10-10.0.19041-SP0) method: GET uri: https://searchd4ef1fac.search.windows.net/skillsets?api-version=2021-04-30-Preview response: body: - string: 
'{"@odata.context":"https://searchd4ef1fac.search.windows.net/$metadata#skillsets","value":[{"@odata.etag":"\"0x8D96E6888074840\"","name":"test-ss","description":"desc2","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":"#1","description":null,"context":"/document","categories":["Person","Quantity","Organization","URL","Email","Location","DateTime"],"defaultLanguageCode":"en","minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}]}' + string: '{"@odata.context":"https://searchd4ef1fac.search.windows.net/$metadata#skillsets","value":[{"@odata.etag":"\"0x8D977C87A5B209B\"","name":"test-ss","description":"desc2","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":"#1","description":null,"context":"/document","categories":["Person","Quantity","Organization","URL","Email","Location","DateTime"],"defaultLanguageCode":"en","minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}]}' headers: cache-control: - no-cache @@ -133,9 +133,9 @@ interactions: content-type: - application/json; odata.metadata=minimal date: - - Thu, 02 Sep 2021 23:22:28 GMT + - Tue, 14 Sep 2021 21:41:57 GMT elapsed-time: - - '36' + - '67' expires: - '-1' odata-version: @@ -145,7 +145,7 @@ interactions: preference-applied: - odata.include-annotations="*" request-id: - - a52fad4e-0c44-11ec-b35f-74c63bed1137 + - 97945ba1-15a4-11ec-95bc-74c63bed1137 strict-transport-security: - max-age=15724800; includeSubDomains vary: @@ -163,12 +163,12 @@ interactions: Connection: - keep-alive User-Agent: - - azsdk-python-search-documents/11.3.0b3 Python/3.9.2 (Windows-10-10.0.19041-SP0) + - azsdk-python-search-documents/11.3.0b4 Python/3.9.2 (Windows-10-10.0.19041-SP0) method: GET uri: https://searchd4ef1fac.search.windows.net/skillsets('test-ss')?api-version=2021-04-30-Preview response: body: - string: '{"@odata.context":"https://searchd4ef1fac.search.windows.net/$metadata#skillsets/$entity","@odata.etag":"\"0x8D96E6888074840\"","name":"test-ss","description":"desc2","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":"#1","description":null,"context":"/document","categories":["Person","Quantity","Organization","URL","Email","Location","DateTime"],"defaultLanguageCode":"en","minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}' + string: 
'{"@odata.context":"https://searchd4ef1fac.search.windows.net/$metadata#skillsets/$entity","@odata.etag":"\"0x8D977C87A5B209B\"","name":"test-ss","description":"desc2","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":"#1","description":null,"context":"/document","categories":["Person","Quantity","Organization","URL","Email","Location","DateTime"],"defaultLanguageCode":"en","minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}' headers: cache-control: - no-cache @@ -177,11 +177,11 @@ interactions: content-type: - application/json; odata.metadata=minimal date: - - Thu, 02 Sep 2021 23:22:28 GMT + - Tue, 14 Sep 2021 21:41:57 GMT elapsed-time: - - '11' + - '14' etag: - - W/"0x8D96E6888074840" + - W/"0x8D977C87A5B209B" expires: - '-1' odata-version: @@ -191,7 +191,7 @@ interactions: preference-applied: - odata.include-annotations="*" request-id: - - a53c5426-0c44-11ec-bf70-74c63bed1137 + - 97a7b2a7-15a4-11ec-aed2-74c63bed1137 strict-transport-security: - max-age=15724800; includeSubDomains vary: diff --git a/sdk/search/azure-search-documents/tests/recordings/test_search_index_client_skillset_live.test_create_skillset.yaml b/sdk/search/azure-search-documents/tests/recordings/test_search_index_client_skillset_live.test_create_skillset.yaml index ebb45a75662b..0e61e15a6ac6 100644 --- a/sdk/search/azure-search-documents/tests/recordings/test_search_index_client_skillset_live.test_create_skillset.yaml +++ b/sdk/search/azure-search-documents/tests/recordings/test_search_index_client_skillset_live.test_create_skillset.yaml @@ -1,51 +1,4 @@ interactions: -- request: - body: null - headers: - Accept: - - application/json;odata.metadata=minimal - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '0' - User-Agent: - - azsdk-python-search-documents/11.3.0b3 Python/3.9.2 (Windows-10-10.0.19041-SP0) - method: DELETE - uri: https://searchd998184f.search.windows.net/skillsets('test-ss')?api-version=2021-04-30-Preview - response: - body: - string: '{"error":{"code":"","message":"No skillset with the name ''test-ss'' - was found in a service named ''searchd998184f''."}}' - headers: - cache-control: - - no-cache - content-language: - - en - content-length: - - '116' - content-type: - - application/json; odata.metadata=minimal - date: - - Thu, 02 Sep 2021 23:22:43 GMT - elapsed-time: - - '38' - expires: - - '-1' - odata-version: - - '4.0' - pragma: - - no-cache - preference-applied: - - odata.include-annotations="*" - request-id: - - adcd5521-0c44-11ec-99c1-74c63bed1137 - strict-transport-security: - - max-age=15724800; includeSubDomains - status: - code: 404 - message: Not Found - request: body: '{"name": "test-ss", "description": "desc", "skills": [{"@odata.type": "#Microsoft.Skills.Text.EntityRecognitionSkill", "description": "Skill Version 1", "inputs": [{"name": "text", "source": "/document/content"}], @@ -73,12 +26,12 @@ interactions: Content-Type: - application/json User-Agent: - - azsdk-python-search-documents/11.3.0b3 Python/3.9.2 (Windows-10-10.0.19041-SP0) + - azsdk-python-search-documents/11.3.0b4 Python/3.9.2 (Windows-10-10.0.19041-SP0) method: POST uri: https://searchd998184f.search.windows.net/skillsets?api-version=2021-04-30-Preview response: body: - string: 
'{"@odata.context":"https://searchd998184f.search.windows.net/$metadata#skillsets/$entity","@odata.etag":"\"0x8D96E6890F19808\"","name":"test-ss","description":"desc","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":null,"description":"Skill + string: '{"@odata.context":"https://searchd998184f.search.windows.net/$metadata#skillsets/$entity","@odata.etag":"\"0x8D977C13CD227F9\"","name":"test-ss","description":"desc","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":null,"description":"Skill Version 1","context":null,"categories":[],"defaultLanguageCode":null,"minimumPrecision":null,"includeTypelessEntities":true,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizationsS1"}]},{"@odata.type":"#Microsoft.Skills.Text.V3.EntityRecognitionSkill","name":null,"description":"Skill Version 3","context":null,"categories":[],"defaultLanguageCode":null,"minimumPrecision":null,"modelVersion":"3","inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizationsS2"}]},{"@odata.type":"#Microsoft.Skills.Text.SentimentSkill","name":null,"description":"Sentiment V1","context":null,"defaultLanguageCode":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"score","targetName":"scoreS3"}]},{"@odata.type":"#Microsoft.Skills.Text.V3.SentimentSkill","name":null,"description":"Sentiment @@ -91,11 +44,11 @@ interactions: content-type: - application/json; odata.metadata=minimal date: - - Thu, 02 Sep 2021 23:22:43 GMT + - Tue, 14 Sep 2021 20:50:08 GMT elapsed-time: - - '104' + - '2192' etag: - - W/"0x8D96E6890F19808" + - W/"0x8D977C13CD227F9" expires: - '-1' location: @@ -107,7 +60,7 @@ interactions: preference-applied: - odata.include-annotations="*" request-id: - - ae01135c-0c44-11ec-9160-74c63bed1137 + - 5895d1df-159d-11ec-ab13-74c63bed1137 strict-transport-security: - max-age=15724800; includeSubDomains status: @@ -123,12 +76,12 @@ interactions: Connection: - keep-alive User-Agent: - - azsdk-python-search-documents/11.3.0b3 Python/3.9.2 (Windows-10-10.0.19041-SP0) + - azsdk-python-search-documents/11.3.0b4 Python/3.9.2 (Windows-10-10.0.19041-SP0) method: GET uri: https://searchd998184f.search.windows.net/skillsets?api-version=2021-04-30-Preview response: body: - string: '{"@odata.context":"https://searchd998184f.search.windows.net/$metadata#skillsets","value":[{"@odata.etag":"\"0x8D96E6890F19808\"","name":"test-ss","description":"desc","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":"#1","description":"Skill + string: '{"@odata.context":"https://searchd998184f.search.windows.net/$metadata#skillsets","value":[{"@odata.etag":"\"0x8D977C13CD227F9\"","name":"test-ss","description":"desc","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":"#1","description":"Skill Version 1","context":"/document","categories":["Person","Quantity","Organization","URL","Email","Location","DateTime"],"defaultLanguageCode":"en","minimumPrecision":null,"includeTypelessEntities":true,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizationsS1"}]},{"@odata.type":"#Microsoft.Skills.Text.V3.EntityRecognitionSkill","name":"#2","description":"Skill Version 
3","context":"/document","categories":["Product","Phone Number","Person","Quantity","IP Address","Organization","URL","Email","Event","Skill","Location","PersonType","Address","DateTime"],"defaultLanguageCode":"en","minimumPrecision":null,"modelVersion":"3","inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizationsS2"}]},{"@odata.type":"#Microsoft.Skills.Text.SentimentSkill","name":"#3","description":"Sentiment @@ -142,9 +95,9 @@ interactions: content-type: - application/json; odata.metadata=minimal date: - - Thu, 02 Sep 2021 23:22:43 GMT + - Tue, 14 Sep 2021 20:50:08 GMT elapsed-time: - - '40' + - '118' expires: - '-1' odata-version: @@ -154,7 +107,7 @@ interactions: preference-applied: - odata.include-annotations="*" request-id: - - ae1b2686-0c44-11ec-b21b-74c63bed1137 + - 59ff6a42-159d-11ec-b22c-74c63bed1137 strict-transport-security: - max-age=15724800; includeSubDomains vary: diff --git a/sdk/search/azure-search-documents/tests/recordings/test_search_index_client_skillset_live.test_delete_skillset.yaml b/sdk/search/azure-search-documents/tests/recordings/test_search_index_client_skillset_live.test_delete_skillset.yaml index 2938135169a2..6014d767b83b 100644 --- a/sdk/search/azure-search-documents/tests/recordings/test_search_index_client_skillset_live.test_delete_skillset.yaml +++ b/sdk/search/azure-search-documents/tests/recordings/test_search_index_client_skillset_live.test_delete_skillset.yaml @@ -15,12 +15,12 @@ interactions: Content-Type: - application/json User-Agent: - - azsdk-python-search-documents/11.3.0b3 Python/3.9.2 (Windows-10-10.0.19041-SP0) + - azsdk-python-search-documents/11.3.0b4 Python/3.9.2 (Windows-10-10.0.19041-SP0) method: POST uri: https://searchd97c184e.search.windows.net/skillsets?api-version=2021-04-30-Preview response: body: - string: '{"@odata.context":"https://searchd97c184e.search.windows.net/$metadata#skillsets/$entity","@odata.etag":"\"0x8D96E6899DB4B6E\"","name":"test-ss","description":"desc","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":null,"description":null,"context":null,"categories":[],"defaultLanguageCode":null,"minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}' + string: '{"@odata.context":"https://searchd97c184e.search.windows.net/$metadata#skillsets/$entity","@odata.etag":"\"0x8D977C15538E247\"","name":"test-ss","description":"desc","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":null,"description":null,"context":null,"categories":[],"defaultLanguageCode":null,"minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}' headers: cache-control: - no-cache @@ -29,11 +29,11 @@ interactions: content-type: - application/json; odata.metadata=minimal date: - - Thu, 02 Sep 2021 23:22:57 GMT + - Tue, 14 Sep 2021 20:50:49 GMT elapsed-time: - - '40' + - '2250' etag: - - W/"0x8D96E6899DB4B6E" + - W/"0x8D977C15538E247" expires: - '-1' location: @@ -45,7 +45,7 @@ interactions: preference-applied: - odata.include-annotations="*" request-id: - - 
b6c2e706-0c44-11ec-b97f-74c63bed1137 + - 70f8e527-159d-11ec-8600-74c63bed1137 strict-transport-security: - max-age=15724800; includeSubDomains status: @@ -61,12 +61,12 @@ interactions: Connection: - keep-alive User-Agent: - - azsdk-python-search-documents/11.3.0b3 Python/3.9.2 (Windows-10-10.0.19041-SP0) + - azsdk-python-search-documents/11.3.0b4 Python/3.9.2 (Windows-10-10.0.19041-SP0) method: GET uri: https://searchd97c184e.search.windows.net/skillsets?api-version=2021-04-30-Preview response: body: - string: '{"@odata.context":"https://searchd97c184e.search.windows.net/$metadata#skillsets","value":[{"@odata.etag":"\"0x8D96E6899DB4B6E\"","name":"test-ss","description":"desc","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":"#1","description":null,"context":"/document","categories":["Person","Quantity","Organization","URL","Email","Location","DateTime"],"defaultLanguageCode":"en","minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}]}' + string: '{"@odata.context":"https://searchd97c184e.search.windows.net/$metadata#skillsets","value":[{"@odata.etag":"\"0x8D977C15538E247\"","name":"test-ss","description":"desc","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":"#1","description":null,"context":"/document","categories":["Person","Quantity","Organization","URL","Email","Location","DateTime"],"defaultLanguageCode":"en","minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}]}' headers: cache-control: - no-cache @@ -75,9 +75,9 @@ interactions: content-type: - application/json; odata.metadata=minimal date: - - Thu, 02 Sep 2021 23:22:57 GMT + - Tue, 14 Sep 2021 20:50:49 GMT elapsed-time: - - '17' + - '92' expires: - '-1' odata-version: @@ -87,7 +87,7 @@ interactions: preference-applied: - odata.include-annotations="*" request-id: - - b704089c-0c44-11ec-8ba2-74c63bed1137 + - 72684c35-159d-11ec-9149-74c63bed1137 strict-transport-security: - max-age=15724800; includeSubDomains vary: @@ -107,7 +107,7 @@ interactions: Content-Length: - '0' User-Agent: - - azsdk-python-search-documents/11.3.0b3 Python/3.9.2 (Windows-10-10.0.19041-SP0) + - azsdk-python-search-documents/11.3.0b4 Python/3.9.2 (Windows-10-10.0.19041-SP0) method: DELETE uri: https://searchd97c184e.search.windows.net/skillsets('test-ss')?api-version=2021-04-30-Preview response: @@ -117,15 +117,15 @@ interactions: cache-control: - no-cache date: - - Thu, 02 Sep 2021 23:22:57 GMT + - Tue, 14 Sep 2021 20:50:49 GMT elapsed-time: - - '25' + - '40' expires: - '-1' pragma: - no-cache request-id: - - b710b746-0c44-11ec-83f1-74c63bed1137 + - 727d7ca6-159d-11ec-9b97-74c63bed1137 strict-transport-security: - max-age=15724800; includeSubDomains status: @@ -141,7 +141,7 @@ interactions: Connection: - keep-alive User-Agent: - - azsdk-python-search-documents/11.3.0b3 Python/3.9.2 (Windows-10-10.0.19041-SP0) + - azsdk-python-search-documents/11.3.0b4 Python/3.9.2 (Windows-10-10.0.19041-SP0) method: GET uri: https://searchd97c184e.search.windows.net/skillsets?api-version=2021-04-30-Preview response: @@ -155,9 +155,9 @@ interactions: content-type: - 
application/json; odata.metadata=minimal date: - - Thu, 02 Sep 2021 23:22:58 GMT + - Tue, 14 Sep 2021 20:50:49 GMT elapsed-time: - - '6' + - '9' expires: - '-1' odata-version: @@ -167,7 +167,7 @@ interactions: preference-applied: - odata.include-annotations="*" request-id: - - b71bb2dd-0c44-11ec-b98e-74c63bed1137 + - 728a490a-159d-11ec-bc59-74c63bed1137 strict-transport-security: - max-age=15724800; includeSubDomains vary: diff --git a/sdk/search/azure-search-documents/tests/recordings/test_search_index_client_skillset_live.test_delete_skillset_if_unchanged.yaml b/sdk/search/azure-search-documents/tests/recordings/test_search_index_client_skillset_live.test_delete_skillset_if_unchanged.yaml index 7bfa6376fc3d..86f1bf8de282 100644 --- a/sdk/search/azure-search-documents/tests/recordings/test_search_index_client_skillset_live.test_delete_skillset_if_unchanged.yaml +++ b/sdk/search/azure-search-documents/tests/recordings/test_search_index_client_skillset_live.test_delete_skillset_if_unchanged.yaml @@ -15,12 +15,12 @@ interactions: Content-Type: - application/json User-Agent: - - azsdk-python-search-documents/11.3.0b3 Python/3.9.2 (Windows-10-10.0.19041-SP0) + - azsdk-python-search-documents/11.3.0b4 Python/3.9.2 (Windows-10-10.0.19041-SP0) method: POST uri: https://search3a191d88.search.windows.net/skillsets?api-version=2021-04-30-Preview response: body: - string: '{"@odata.context":"https://search3a191d88.search.windows.net/$metadata#skillsets/$entity","@odata.etag":"\"0x8D96E68A28D3F51\"","name":"test-ss","description":"desc","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":null,"description":null,"context":null,"categories":[],"defaultLanguageCode":null,"minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}' + string: '{"@odata.context":"https://search3a191d88.search.windows.net/$metadata#skillsets/$entity","@odata.etag":"\"0x8D977C34C46233E\"","name":"test-ss","description":"desc","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":null,"description":null,"context":null,"categories":[],"defaultLanguageCode":null,"minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}' headers: cache-control: - no-cache @@ -29,11 +29,11 @@ interactions: content-type: - application/json; odata.metadata=minimal date: - - Thu, 02 Sep 2021 23:23:12 GMT + - Tue, 14 Sep 2021 21:04:53 GMT elapsed-time: - - '85' + - '2226' etag: - - W/"0x8D96E68A28D3F51" + - W/"0x8D977C34C46233E" expires: - '-1' location: @@ -45,7 +45,7 @@ interactions: preference-applied: - odata.include-annotations="*" request-id: - - bf84a894-0c44-11ec-a6bb-74c63bed1137 + - 680ad3a0-159f-11ec-9d34-74c63bed1137 strict-transport-security: - max-age=15724800; includeSubDomains status: @@ -70,12 +70,12 @@ interactions: Prefer: - return=representation User-Agent: - - azsdk-python-search-documents/11.3.0b3 Python/3.9.2 (Windows-10-10.0.19041-SP0) + - azsdk-python-search-documents/11.3.0b4 Python/3.9.2 (Windows-10-10.0.19041-SP0) method: PUT uri: https://search3a191d88.search.windows.net/skillsets('test-ss')?api-version=2021-04-30-Preview response: 
body: - string: '{"@odata.context":"https://search3a191d88.search.windows.net/$metadata#skillsets/$entity","@odata.etag":"\"0x8D96E68A29C3723\"","name":"test-ss","description":"updated","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":null,"description":null,"context":null,"categories":[],"defaultLanguageCode":null,"minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}' + string: '{"@odata.context":"https://search3a191d88.search.windows.net/$metadata#skillsets/$entity","@odata.etag":"\"0x8D977C34C593993\"","name":"test-ss","description":"updated","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":null,"description":null,"context":null,"categories":[],"defaultLanguageCode":null,"minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}' headers: cache-control: - no-cache @@ -84,11 +84,11 @@ interactions: content-type: - application/json; odata.metadata=minimal date: - - Thu, 02 Sep 2021 23:23:12 GMT + - Tue, 14 Sep 2021 21:04:53 GMT elapsed-time: - - '51' + - '68' etag: - - W/"0x8D96E68A29C3723" + - W/"0x8D977C34C593993" expires: - '-1' odata-version: @@ -98,7 +98,7 @@ interactions: preference-applied: - odata.include-annotations="*" request-id: - - bfb5a199-0c44-11ec-9ff5-74c63bed1137 + - 69782ec6-159f-11ec-ab05-74c63bed1137 strict-transport-security: - max-age=15724800; includeSubDomains vary: @@ -118,9 +118,9 @@ interactions: Content-Length: - '0' If-Match: - - '"0x8D96E68A28D3F51"' + - '"0x8D977C34C46233E"' User-Agent: - - azsdk-python-search-documents/11.3.0b3 Python/3.9.2 (Windows-10-10.0.19041-SP0) + - azsdk-python-search-documents/11.3.0b4 Python/3.9.2 (Windows-10-10.0.19041-SP0) method: DELETE uri: https://search3a191d88.search.windows.net/skillsets('test-ss')?api-version=2021-04-30-Preview response: @@ -138,9 +138,9 @@ interactions: content-type: - application/json; odata.metadata=minimal date: - - Thu, 02 Sep 2021 23:23:12 GMT + - Tue, 14 Sep 2021 21:04:53 GMT elapsed-time: - - '13' + - '20' expires: - '-1' odata-version: @@ -150,7 +150,7 @@ interactions: preference-applied: - odata.include-annotations="*" request-id: - - bfc58d8e-0c44-11ec-b48e-74c63bed1137 + - 698a3438-159f-11ec-8044-74c63bed1137 strict-transport-security: - max-age=15724800; includeSubDomains status: diff --git a/sdk/search/azure-search-documents/tests/recordings/test_search_index_client_skillset_live.test_get_skillset.yaml b/sdk/search/azure-search-documents/tests/recordings/test_search_index_client_skillset_live.test_get_skillset.yaml index c772ef5316ed..71191fd5bb71 100644 --- a/sdk/search/azure-search-documents/tests/recordings/test_search_index_client_skillset_live.test_get_skillset.yaml +++ b/sdk/search/azure-search-documents/tests/recordings/test_search_index_client_skillset_live.test_get_skillset.yaml @@ -15,12 +15,12 @@ interactions: Content-Type: - application/json User-Agent: - - azsdk-python-search-documents/11.3.0b3 Python/3.9.2 (Windows-10-10.0.19041-SP0) + - azsdk-python-search-documents/11.3.0b4 Python/3.9.2 (Windows-10-10.0.19041-SP0) method: POST uri: 
https://search9274171b.search.windows.net/skillsets?api-version=2021-04-30-Preview response: body: - string: '{"@odata.context":"https://search9274171b.search.windows.net/$metadata#skillsets/$entity","@odata.etag":"\"0x8D96E68AB8A58B0\"","name":"test-ss","description":"desc","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":null,"description":null,"context":null,"categories":[],"defaultLanguageCode":null,"minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}' + string: '{"@odata.context":"https://search9274171b.search.windows.net/$metadata#skillsets/$entity","@odata.etag":"\"0x8D977C3591A39FF\"","name":"test-ss","description":"desc","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":null,"description":null,"context":null,"categories":[],"defaultLanguageCode":null,"minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}' headers: cache-control: - no-cache @@ -29,11 +29,11 @@ interactions: content-type: - application/json; odata.metadata=minimal date: - - Thu, 02 Sep 2021 23:23:27 GMT + - Tue, 14 Sep 2021 21:05:14 GMT elapsed-time: - - '66' + - '2058' etag: - - W/"0x8D96E68AB8A58B0" + - W/"0x8D977C3591A39FF" expires: - '-1' location: @@ -45,7 +45,7 @@ interactions: preference-applied: - odata.include-annotations="*" request-id: - - c8807327-0c44-11ec-8f54-74c63bed1137 + - 74f3cf64-159f-11ec-886a-74c63bed1137 strict-transport-security: - max-age=15724800; includeSubDomains status: @@ -61,12 +61,12 @@ interactions: Connection: - keep-alive User-Agent: - - azsdk-python-search-documents/11.3.0b3 Python/3.9.2 (Windows-10-10.0.19041-SP0) + - azsdk-python-search-documents/11.3.0b4 Python/3.9.2 (Windows-10-10.0.19041-SP0) method: GET uri: https://search9274171b.search.windows.net/skillsets?api-version=2021-04-30-Preview response: body: - string: '{"@odata.context":"https://search9274171b.search.windows.net/$metadata#skillsets","value":[{"@odata.etag":"\"0x8D96E68AB8A58B0\"","name":"test-ss","description":"desc","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":"#1","description":null,"context":"/document","categories":["Person","Quantity","Organization","URL","Email","Location","DateTime"],"defaultLanguageCode":"en","minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}]}' + string: 
'{"@odata.context":"https://search9274171b.search.windows.net/$metadata#skillsets","value":[{"@odata.etag":"\"0x8D977C3591A39FF\"","name":"test-ss","description":"desc","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":"#1","description":null,"context":"/document","categories":["Person","Quantity","Organization","URL","Email","Location","DateTime"],"defaultLanguageCode":"en","minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}]}' headers: cache-control: - no-cache @@ -75,9 +75,9 @@ interactions: content-type: - application/json; odata.metadata=minimal date: - - Thu, 02 Sep 2021 23:23:27 GMT + - Tue, 14 Sep 2021 21:05:14 GMT elapsed-time: - - '16' + - '82' expires: - '-1' odata-version: @@ -87,7 +87,7 @@ interactions: preference-applied: - odata.include-annotations="*" request-id: - - c8b283be-0c44-11ec-a75e-74c63bed1137 + - 764ad3c7-159f-11ec-84fe-74c63bed1137 strict-transport-security: - max-age=15724800; includeSubDomains vary: @@ -105,12 +105,12 @@ interactions: Connection: - keep-alive User-Agent: - - azsdk-python-search-documents/11.3.0b3 Python/3.9.2 (Windows-10-10.0.19041-SP0) + - azsdk-python-search-documents/11.3.0b4 Python/3.9.2 (Windows-10-10.0.19041-SP0) method: GET uri: https://search9274171b.search.windows.net/skillsets('test-ss')?api-version=2021-04-30-Preview response: body: - string: '{"@odata.context":"https://search9274171b.search.windows.net/$metadata#skillsets/$entity","@odata.etag":"\"0x8D96E68AB8A58B0\"","name":"test-ss","description":"desc","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":"#1","description":null,"context":"/document","categories":["Person","Quantity","Organization","URL","Email","Location","DateTime"],"defaultLanguageCode":"en","minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}' + string: '{"@odata.context":"https://search9274171b.search.windows.net/$metadata#skillsets/$entity","@odata.etag":"\"0x8D977C3591A39FF\"","name":"test-ss","description":"desc","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":"#1","description":null,"context":"/document","categories":["Person","Quantity","Organization","URL","Email","Location","DateTime"],"defaultLanguageCode":"en","minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}' headers: cache-control: - no-cache @@ -119,11 +119,11 @@ interactions: content-type: - application/json; odata.metadata=minimal date: - - Thu, 02 Sep 2021 23:23:27 GMT + - Tue, 14 Sep 2021 21:05:14 GMT elapsed-time: - - '10' + - '17' etag: - - W/"0x8D96E68AB8A58B0" + - W/"0x8D977C3591A39FF" expires: - '-1' odata-version: @@ -133,7 +133,7 @@ interactions: preference-applied: - odata.include-annotations="*" request-id: - - c8bbb142-0c44-11ec-bccc-74c63bed1137 + - 765f5d84-159f-11ec-be78-74c63bed1137 strict-transport-security: - max-age=15724800; includeSubDomains vary: diff --git 
a/sdk/search/azure-search-documents/tests/recordings/test_search_index_client_skillset_live.test_get_skillsets.yaml b/sdk/search/azure-search-documents/tests/recordings/test_search_index_client_skillset_live.test_get_skillsets.yaml index 68bbda1f01c7..38903fbd94fc 100644 --- a/sdk/search/azure-search-documents/tests/recordings/test_search_index_client_skillset_live.test_get_skillsets.yaml +++ b/sdk/search/azure-search-documents/tests/recordings/test_search_index_client_skillset_live.test_get_skillsets.yaml @@ -16,12 +16,12 @@ interactions: Content-Type: - application/json User-Agent: - - azsdk-python-search-documents/11.3.0b3 Python/3.9.2 (Windows-10-10.0.19041-SP0) + - azsdk-python-search-documents/11.3.0b4 Python/3.9.2 (Windows-10-10.0.19041-SP0) method: POST uri: https://searchaa02178e.search.windows.net/skillsets?api-version=2021-04-30-Preview response: body: - string: '{"@odata.context":"https://searchaa02178e.search.windows.net/$metadata#skillsets/$entity","@odata.etag":"\"0x8D96E68B42CDF74\"","name":"test-ss-1","description":"desc1","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":null,"description":null,"context":null,"categories":[],"defaultLanguageCode":null,"minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}' + string: '{"@odata.context":"https://searchaa02178e.search.windows.net/$metadata#skillsets/$entity","@odata.etag":"\"0x8D977C3656261D4\"","name":"test-ss-1","description":"desc1","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":null,"description":null,"context":null,"categories":[],"defaultLanguageCode":null,"minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}' headers: cache-control: - no-cache @@ -30,11 +30,11 @@ interactions: content-type: - application/json; odata.metadata=minimal date: - - Thu, 02 Sep 2021 23:23:42 GMT + - Tue, 14 Sep 2021 21:05:35 GMT elapsed-time: - - '51' + - '2127' etag: - - W/"0x8D96E68B42CDF74" + - W/"0x8D977C3656261D4" expires: - '-1' location: @@ -46,7 +46,7 @@ interactions: preference-applied: - odata.include-annotations="*" request-id: - - d12da940-0c44-11ec-8a11-74c63bed1137 + - 8133f1db-159f-11ec-9110-74c63bed1137 strict-transport-security: - max-age=15724800; includeSubDomains status: @@ -69,12 +69,12 @@ interactions: Content-Type: - application/json User-Agent: - - azsdk-python-search-documents/11.3.0b3 Python/3.9.2 (Windows-10-10.0.19041-SP0) + - azsdk-python-search-documents/11.3.0b4 Python/3.9.2 (Windows-10-10.0.19041-SP0) method: POST uri: https://searchaa02178e.search.windows.net/skillsets?api-version=2021-04-30-Preview response: body: - string: 
'{"@odata.context":"https://searchaa02178e.search.windows.net/$metadata#skillsets/$entity","@odata.etag":"\"0x8D96E68B43AC58E\"","name":"test-ss-2","description":"desc2","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":null,"description":null,"context":null,"categories":[],"defaultLanguageCode":null,"minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}' + string: '{"@odata.context":"https://searchaa02178e.search.windows.net/$metadata#skillsets/$entity","@odata.etag":"\"0x8D977C365713281\"","name":"test-ss-2","description":"desc2","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":null,"description":null,"context":null,"categories":[],"defaultLanguageCode":null,"minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}' headers: cache-control: - no-cache @@ -83,11 +83,11 @@ interactions: content-type: - application/json; odata.metadata=minimal date: - - Thu, 02 Sep 2021 23:23:42 GMT + - Tue, 14 Sep 2021 21:05:35 GMT elapsed-time: - - '46' + - '50' etag: - - W/"0x8D96E68B43AC58E" + - W/"0x8D977C365713281" expires: - '-1' location: @@ -99,7 +99,7 @@ interactions: preference-applied: - odata.include-annotations="*" request-id: - - d1554d7a-0c44-11ec-9dfb-74c63bed1137 + - 829337aa-159f-11ec-9b0e-74c63bed1137 strict-transport-security: - max-age=15724800; includeSubDomains status: @@ -115,12 +115,12 @@ interactions: Connection: - keep-alive User-Agent: - - azsdk-python-search-documents/11.3.0b3 Python/3.9.2 (Windows-10-10.0.19041-SP0) + - azsdk-python-search-documents/11.3.0b4 Python/3.9.2 (Windows-10-10.0.19041-SP0) method: GET uri: https://searchaa02178e.search.windows.net/skillsets?api-version=2021-04-30-Preview response: body: - string: '{"@odata.context":"https://searchaa02178e.search.windows.net/$metadata#skillsets","value":[{"@odata.etag":"\"0x8D96E68B42CDF74\"","name":"test-ss-1","description":"desc1","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":"#1","description":null,"context":"/document","categories":["Person","Quantity","Organization","URL","Email","Location","DateTime"],"defaultLanguageCode":"en","minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null},{"@odata.etag":"\"0x8D96E68B43AC58E\"","name":"test-ss-2","description":"desc2","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":"#1","description":null,"context":"/document","categories":["Person","Quantity","Organization","URL","Email","Location","DateTime"],"defaultLanguageCode":"en","minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}]}' + string: 
'{"@odata.context":"https://searchaa02178e.search.windows.net/$metadata#skillsets","value":[{"@odata.etag":"\"0x8D977C3656261D4\"","name":"test-ss-1","description":"desc1","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":"#1","description":null,"context":"/document","categories":["Person","Quantity","Organization","URL","Email","Location","DateTime"],"defaultLanguageCode":"en","minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null},{"@odata.etag":"\"0x8D977C365713281\"","name":"test-ss-2","description":"desc2","skills":[{"@odata.type":"#Microsoft.Skills.Text.EntityRecognitionSkill","name":"#1","description":null,"context":"/document","categories":["Person","Quantity","Organization","URL","Email","Location","DateTime"],"defaultLanguageCode":"en","minimumPrecision":null,"includeTypelessEntities":null,"inputs":[{"name":"text","source":"/document/content","sourceContext":null,"inputs":[]}],"outputs":[{"name":"organizations","targetName":"organizations"}]}],"cognitiveServices":null,"knowledgeStore":null,"encryptionKey":null}]}' headers: cache-control: - no-cache @@ -129,9 +129,9 @@ interactions: content-type: - application/json; odata.metadata=minimal date: - - Thu, 02 Sep 2021 23:23:42 GMT + - Tue, 14 Sep 2021 21:05:35 GMT elapsed-time: - - '23' + - '95' expires: - '-1' odata-version: @@ -141,7 +141,7 @@ interactions: preference-applied: - odata.include-annotations="*" request-id: - - d163c491-0c44-11ec-8ccb-74c63bed1137 + - 82a27edc-159f-11ec-9983-74c63bed1137 strict-transport-security: - max-age=15724800; includeSubDomains vary: diff --git a/sdk/search/azure-search-documents/tests/search_service_preparer.py b/sdk/search/azure-search-documents/tests/search_service_preparer.py index 4bef849223ac..ada16aafc06c 100644 --- a/sdk/search/azure-search-documents/tests/search_service_preparer.py +++ b/sdk/search/azure-search-documents/tests/search_service_preparer.py @@ -90,7 +90,7 @@ def create_resource(self, name, **kwargs): # create the search service from azure.mgmt.search.models import SearchService, Sku - service_config = SearchService(location="West US", sku=Sku(name="free")) + service_config = SearchService(location="West US", sku=Sku(name="basic")) resource = self.mgmt_client.services.begin_create_or_update( group_name, self.service_name, service_config ) diff --git a/sdk/search/azure-search-documents/tests/test_search_index_client_skillset_live.py b/sdk/search/azure-search-documents/tests/test_search_index_client_skillset_live.py index cd586a816deb..b0fc016d2633 100644 --- a/sdk/search/azure-search-documents/tests/test_search_index_client_skillset_live.py +++ b/sdk/search/azure-search-documents/tests/test_search_index_client_skillset_live.py @@ -47,20 +47,17 @@ def test_create_skillset(self, api_key, endpoint, index_name, **kwargs): s1 = EntityRecognitionSkill(inputs=[InputFieldMappingEntry(name="text", source="/document/content")], outputs=[OutputFieldMappingEntry(name="organizations", target_name="organizationsS1")], description="Skill Version 1", - model_version="1", include_typeless_entities=True) s2 = EntityRecognitionSkill(inputs=[InputFieldMappingEntry(name="text", source="/document/content")], outputs=[OutputFieldMappingEntry(name="organizations", target_name="organizationsS2")], skill_version=EntityRecognitionSkillVersion.LATEST, 
description="Skill Version 3", - model_version="3", - include_typeless_entities=True) + model_version="3") s3 = SentimentSkill(inputs=[InputFieldMappingEntry(name="text", source="/document/content")], outputs=[OutputFieldMappingEntry(name="score", target_name="scoreS3")], skill_version=SentimentSkillVersion.V1, - description="Sentiment V1", - include_opinion_mining=True) + description="Sentiment V1") s4 = SentimentSkill(inputs=[InputFieldMappingEntry(name="text", source="/document/content")], outputs=[OutputFieldMappingEntry(name="confidenceScores", target_name="scoreS4")], @@ -74,8 +71,6 @@ def test_create_skillset(self, api_key, endpoint, index_name, **kwargs): skillset = SearchIndexerSkillset(name=name, skills=list([s1, s2, s3, s4, s5]), description="desc") - client.delete_skillset(name) - dict_skills = [skill.as_dict() for skill in skillset.skills] skillset.skills = dict_skills @@ -99,6 +94,36 @@ def test_create_skillset(self, api_key, endpoint, index_name, **kwargs): assert len(client.get_skillsets()) == 1 + + def test_create_skillset_validation(self, **kwargs): + with pytest.raises(ValueError) as err: + client = SearchIndexerClient("fake_endpoint", AzureKeyCredential("fake_key")) + name = "test-ss" + + s1 = EntityRecognitionSkill(inputs=[InputFieldMappingEntry(name="text", source="/document/content")], + outputs=[OutputFieldMappingEntry(name="organizations", target_name="organizationsS1")], + description="Skill Version 1", + model_version="1", + include_typeless_entities=True) + + s2 = EntityRecognitionSkill(inputs=[InputFieldMappingEntry(name="text", source="/document/content")], + outputs=[OutputFieldMappingEntry(name="organizations", target_name="organizationsS2")], + skill_version=EntityRecognitionSkillVersion.LATEST, + description="Skill Version 3", + model_version="3", + include_typeless_entities=True) + s3 = SentimentSkill(inputs=[InputFieldMappingEntry(name="text", source="/document/content")], + outputs=[OutputFieldMappingEntry(name="score", target_name="scoreS3")], + skill_version=SentimentSkillVersion.V1, + description="Sentiment V1", + include_opinion_mining=True) + skillset = SearchIndexerSkillset(name=name, skills=list([s1, s2, s3]), description="desc") + client.create_skillset(skillset) + assert 'include_typeless_entities' in str(err.value) + assert 'model_version' in str(err.value) + assert 'include_opinion_mining' in str(err.value) + + @SearchResourceGroupPreparer(random_name_enabled=True) @SearchServicePreparer(schema=SCHEMA, index_batch=BATCH) def test_delete_skillset(self, api_key, endpoint, index_name, **kwargs): diff --git a/sdk/storage/azure-mgmt-storage/CHANGELOG.md b/sdk/storage/azure-mgmt-storage/CHANGELOG.md index 5cd1252d5689..5505dab1d2f3 100644 --- a/sdk/storage/azure-mgmt-storage/CHANGELOG.md +++ b/sdk/storage/azure-mgmt-storage/CHANGELOG.md @@ -1,5 +1,35 @@ # Release History +## 19.0.0 (2021-09-14) + +**Features** + + - Model BlobContainer has a new parameter enable_nfs_v3_root_squash + - Model BlobContainer has a new parameter enable_nfs_v3_all_squash + - Model UpdateHistoryProperty has a new parameter allow_protected_append_writes + - Model UpdateHistoryProperty has a new parameter allow_protected_append_writes_all + - Model StorageAccountUpdateParameters has a new parameter default_to_o_auth_authentication + - Model StorageAccountUpdateParameters has a new parameter public_network_access + - Model StorageAccountUpdateParameters has a new parameter immutable_storage_with_versioning + - Model ImmutabilityPolicy has a new parameter 
allow_protected_append_writes_all + - Model StorageAccountCreateParameters has a new parameter default_to_o_auth_authentication + - Model StorageAccountCreateParameters has a new parameter public_network_access + - Model StorageAccountCreateParameters has a new parameter immutable_storage_with_versioning + - Model ListContainerItem has a new parameter enable_nfs_v3_root_squash + - Model ListContainerItem has a new parameter enable_nfs_v3_all_squash + - Model LegalHoldProperties has a new parameter protected_append_writes_history + - Model ImmutabilityPolicyProperties has a new parameter allow_protected_append_writes_all + - Model StorageAccount has a new parameter default_to_o_auth_authentication + - Model StorageAccount has a new parameter public_network_access + - Model StorageAccount has a new parameter immutable_storage_with_versioning + - Model LegalHold has a new parameter allow_protected_append_writes_all + - Added operation StorageAccountsOperations.begin_abort_hierarchical_namespace_migration + - Added operation StorageAccountsOperations.begin_hierarchical_namespace_migration + +**Breaking changes** + + - Model AccessPolicy has a new signature + ## 18.0.0 (2021-05-13) **Features** diff --git a/sdk/storage/azure-mgmt-storage/MANIFEST.in b/sdk/storage/azure-mgmt-storage/MANIFEST.in index a3cb07df8765..3a9b6517412b 100644 --- a/sdk/storage/azure-mgmt-storage/MANIFEST.in +++ b/sdk/storage/azure-mgmt-storage/MANIFEST.in @@ -1,3 +1,4 @@ +include _meta.json recursive-include tests *.py *.yaml include *.md include azure/__init__.py diff --git a/sdk/storage/azure-mgmt-storage/_meta.json b/sdk/storage/azure-mgmt-storage/_meta.json index 7af3043bc60f..c3a7720c0900 100644 --- a/sdk/storage/azure-mgmt-storage/_meta.json +++ b/sdk/storage/azure-mgmt-storage/_meta.json @@ -1,8 +1,11 @@ { - "autorest": "3.3.0", - "use": "@autorest/python@5.6.6", - "commit": "719b74f77b92eb1ec3814be6c4488bcf6b651733", + "autorest": "3.4.5", + "use": [ + "@autorest/python@5.8.4", + "@autorest/modelerfour@4.19.2" + ], + "commit": "cfa9e0f3df4553767d7915ec8f471ff7d4931ed1", "repository_url": "https://github.com/Azure/azure-rest-api-specs", - "autorest_command": "autorest specification/storage/resource-manager/readme.md --multiapi --python --python-mode=update --python-sdks-folder=/home/vsts/work/1/s/azure-sdk-for-python/sdk --track2 --use=@autorest/python@5.6.6 --version=3.3.0", + "autorest_command": "autorest specification/storage/resource-manager/readme.md --multiapi --python --python-mode=update --python-sdks-folder=/home/vsts/work/1/s/azure-sdk-for-python/sdk --track2 --use=@autorest/python@5.8.4 --use=@autorest/modelerfour@4.19.2 --version=3.4.5", "readme": "specification/storage/resource-manager/readme.md" } \ No newline at end of file diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_storage_management_client.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_storage_management_client.py index db2c5f8ea626..8a5a61e9f4ff 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_storage_management_client.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_storage_management_client.py @@ -56,7 +56,7 @@ class StorageManagementClient(MultiApiClientMixin, _SDKClient): :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
""" - DEFAULT_API_VERSION = '2021-04-01' + DEFAULT_API_VERSION = '2021-06-01' _PROFILE_TAG = "azure.mgmt.storage.StorageManagementClient" LATEST_PROFILE = ProfileDefinition({ _PROFILE_TAG: { @@ -107,6 +107,7 @@ def models(cls, api_version=DEFAULT_API_VERSION): * 2021-01-01: :mod:`v2021_01_01.models` * 2021-02-01: :mod:`v2021_02_01.models` * 2021-04-01: :mod:`v2021_04_01.models` + * 2021-06-01: :mod:`v2021_06_01.models` """ if api_version == '2015-06-15': from .v2015_06_15 import models @@ -153,6 +154,9 @@ def models(cls, api_version=DEFAULT_API_VERSION): elif api_version == '2021-04-01': from .v2021_04_01 import models return models + elif api_version == '2021-06-01': + from .v2021_06_01 import models + return models raise ValueError("API version {} is not available".format(api_version)) @property @@ -169,6 +173,7 @@ def blob_containers(self): * 2021-01-01: :class:`BlobContainersOperations` * 2021-02-01: :class:`BlobContainersOperations` * 2021-04-01: :class:`BlobContainersOperations` + * 2021-06-01: :class:`BlobContainersOperations` """ api_version = self._get_api_version('blob_containers') if api_version == '2018-02-01': @@ -191,6 +196,8 @@ def blob_containers(self): from .v2021_02_01.operations import BlobContainersOperations as OperationClass elif api_version == '2021-04-01': from .v2021_04_01.operations import BlobContainersOperations as OperationClass + elif api_version == '2021-06-01': + from .v2021_06_01.operations import BlobContainersOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'blob_containers'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -204,6 +211,7 @@ def blob_inventory_policies(self): * 2021-01-01: :class:`BlobInventoryPoliciesOperations` * 2021-02-01: :class:`BlobInventoryPoliciesOperations` * 2021-04-01: :class:`BlobInventoryPoliciesOperations` + * 2021-06-01: :class:`BlobInventoryPoliciesOperations` """ api_version = self._get_api_version('blob_inventory_policies') if api_version == '2019-06-01': @@ -216,6 +224,8 @@ def blob_inventory_policies(self): from .v2021_02_01.operations import BlobInventoryPoliciesOperations as OperationClass elif api_version == '2021-04-01': from .v2021_04_01.operations import BlobInventoryPoliciesOperations as OperationClass + elif api_version == '2021-06-01': + from .v2021_06_01.operations import BlobInventoryPoliciesOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'blob_inventory_policies'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -232,6 +242,7 @@ def blob_services(self): * 2021-01-01: :class:`BlobServicesOperations` * 2021-02-01: :class:`BlobServicesOperations` * 2021-04-01: :class:`BlobServicesOperations` + * 2021-06-01: :class:`BlobServicesOperations` """ api_version = self._get_api_version('blob_services') if api_version == '2018-07-01': @@ -250,6 +261,8 @@ def blob_services(self): from .v2021_02_01.operations import BlobServicesOperations as OperationClass elif api_version == '2021-04-01': from .v2021_04_01.operations import BlobServicesOperations as OperationClass + elif api_version == '2021-06-01': + from .v2021_06_01.operations import BlobServicesOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'blob_services'".format(api_version)) return 
OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -262,6 +275,7 @@ def deleted_accounts(self): * 2021-01-01: :class:`DeletedAccountsOperations` * 2021-02-01: :class:`DeletedAccountsOperations` * 2021-04-01: :class:`DeletedAccountsOperations` + * 2021-06-01: :class:`DeletedAccountsOperations` """ api_version = self._get_api_version('deleted_accounts') if api_version == '2020-08-01-preview': @@ -272,6 +286,8 @@ def deleted_accounts(self): from .v2021_02_01.operations import DeletedAccountsOperations as OperationClass elif api_version == '2021-04-01': from .v2021_04_01.operations import DeletedAccountsOperations as OperationClass + elif api_version == '2021-06-01': + from .v2021_06_01.operations import DeletedAccountsOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'deleted_accounts'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -285,6 +301,7 @@ def encryption_scopes(self): * 2021-01-01: :class:`EncryptionScopesOperations` * 2021-02-01: :class:`EncryptionScopesOperations` * 2021-04-01: :class:`EncryptionScopesOperations` + * 2021-06-01: :class:`EncryptionScopesOperations` """ api_version = self._get_api_version('encryption_scopes') if api_version == '2019-06-01': @@ -297,6 +314,8 @@ def encryption_scopes(self): from .v2021_02_01.operations import EncryptionScopesOperations as OperationClass elif api_version == '2021-04-01': from .v2021_04_01.operations import EncryptionScopesOperations as OperationClass + elif api_version == '2021-06-01': + from .v2021_06_01.operations import EncryptionScopesOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'encryption_scopes'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -311,6 +330,7 @@ def file_services(self): * 2021-01-01: :class:`FileServicesOperations` * 2021-02-01: :class:`FileServicesOperations` * 2021-04-01: :class:`FileServicesOperations` + * 2021-06-01: :class:`FileServicesOperations` """ api_version = self._get_api_version('file_services') if api_version == '2019-04-01': @@ -325,6 +345,8 @@ def file_services(self): from .v2021_02_01.operations import FileServicesOperations as OperationClass elif api_version == '2021-04-01': from .v2021_04_01.operations import FileServicesOperations as OperationClass + elif api_version == '2021-06-01': + from .v2021_06_01.operations import FileServicesOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'file_services'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -339,6 +361,7 @@ def file_shares(self): * 2021-01-01: :class:`FileSharesOperations` * 2021-02-01: :class:`FileSharesOperations` * 2021-04-01: :class:`FileSharesOperations` + * 2021-06-01: :class:`FileSharesOperations` """ api_version = self._get_api_version('file_shares') if api_version == '2019-04-01': @@ -353,6 +376,8 @@ def file_shares(self): from .v2021_02_01.operations import FileSharesOperations as OperationClass elif api_version == '2021-04-01': from .v2021_04_01.operations import FileSharesOperations as OperationClass + elif api_version == '2021-06-01': + from 
.v2021_06_01.operations import FileSharesOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'file_shares'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -369,6 +394,7 @@ def management_policies(self): * 2021-01-01: :class:`ManagementPoliciesOperations` * 2021-02-01: :class:`ManagementPoliciesOperations` * 2021-04-01: :class:`ManagementPoliciesOperations` + * 2021-06-01: :class:`ManagementPoliciesOperations` """ api_version = self._get_api_version('management_policies') if api_version == '2018-07-01': @@ -387,6 +413,8 @@ def management_policies(self): from .v2021_02_01.operations import ManagementPoliciesOperations as OperationClass elif api_version == '2021-04-01': from .v2021_04_01.operations import ManagementPoliciesOperations as OperationClass + elif api_version == '2021-06-01': + from .v2021_06_01.operations import ManagementPoliciesOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'management_policies'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -400,6 +428,7 @@ def object_replication_policies(self): * 2021-01-01: :class:`ObjectReplicationPoliciesOperations` * 2021-02-01: :class:`ObjectReplicationPoliciesOperations` * 2021-04-01: :class:`ObjectReplicationPoliciesOperations` + * 2021-06-01: :class:`ObjectReplicationPoliciesOperations` """ api_version = self._get_api_version('object_replication_policies') if api_version == '2019-06-01': @@ -412,6 +441,8 @@ def object_replication_policies(self): from .v2021_02_01.operations import ObjectReplicationPoliciesOperations as OperationClass elif api_version == '2021-04-01': from .v2021_04_01.operations import ObjectReplicationPoliciesOperations as OperationClass + elif api_version == '2021-06-01': + from .v2021_06_01.operations import ObjectReplicationPoliciesOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'object_replication_policies'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -432,6 +463,7 @@ def operations(self): * 2021-01-01: :class:`Operations` * 2021-02-01: :class:`Operations` * 2021-04-01: :class:`Operations` + * 2021-06-01: :class:`Operations` """ api_version = self._get_api_version('operations') if api_version == '2017-06-01': @@ -458,6 +490,8 @@ def operations(self): from .v2021_02_01.operations import Operations as OperationClass elif api_version == '2021-04-01': from .v2021_04_01.operations import Operations as OperationClass + elif api_version == '2021-06-01': + from .v2021_06_01.operations import Operations as OperationClass else: raise ValueError("API version {} does not have operation group 'operations'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -471,6 +505,7 @@ def private_endpoint_connections(self): * 2021-01-01: :class:`PrivateEndpointConnectionsOperations` * 2021-02-01: :class:`PrivateEndpointConnectionsOperations` * 2021-04-01: :class:`PrivateEndpointConnectionsOperations` + * 2021-06-01: :class:`PrivateEndpointConnectionsOperations` """ api_version = self._get_api_version('private_endpoint_connections') if 
api_version == '2019-06-01': @@ -483,6 +518,8 @@ def private_endpoint_connections(self): from .v2021_02_01.operations import PrivateEndpointConnectionsOperations as OperationClass elif api_version == '2021-04-01': from .v2021_04_01.operations import PrivateEndpointConnectionsOperations as OperationClass + elif api_version == '2021-06-01': + from .v2021_06_01.operations import PrivateEndpointConnectionsOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'private_endpoint_connections'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -496,6 +533,7 @@ def private_link_resources(self): * 2021-01-01: :class:`PrivateLinkResourcesOperations` * 2021-02-01: :class:`PrivateLinkResourcesOperations` * 2021-04-01: :class:`PrivateLinkResourcesOperations` + * 2021-06-01: :class:`PrivateLinkResourcesOperations` """ api_version = self._get_api_version('private_link_resources') if api_version == '2019-06-01': @@ -508,6 +546,8 @@ def private_link_resources(self): from .v2021_02_01.operations import PrivateLinkResourcesOperations as OperationClass elif api_version == '2021-04-01': from .v2021_04_01.operations import PrivateLinkResourcesOperations as OperationClass + elif api_version == '2021-06-01': + from .v2021_06_01.operations import PrivateLinkResourcesOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'private_link_resources'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -521,6 +561,7 @@ def queue(self): * 2021-01-01: :class:`QueueOperations` * 2021-02-01: :class:`QueueOperations` * 2021-04-01: :class:`QueueOperations` + * 2021-06-01: :class:`QueueOperations` """ api_version = self._get_api_version('queue') if api_version == '2019-06-01': @@ -533,6 +574,8 @@ def queue(self): from .v2021_02_01.operations import QueueOperations as OperationClass elif api_version == '2021-04-01': from .v2021_04_01.operations import QueueOperations as OperationClass + elif api_version == '2021-06-01': + from .v2021_06_01.operations import QueueOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'queue'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -546,6 +589,7 @@ def queue_services(self): * 2021-01-01: :class:`QueueServicesOperations` * 2021-02-01: :class:`QueueServicesOperations` * 2021-04-01: :class:`QueueServicesOperations` + * 2021-06-01: :class:`QueueServicesOperations` """ api_version = self._get_api_version('queue_services') if api_version == '2019-06-01': @@ -558,6 +602,8 @@ def queue_services(self): from .v2021_02_01.operations import QueueServicesOperations as OperationClass elif api_version == '2021-04-01': from .v2021_04_01.operations import QueueServicesOperations as OperationClass + elif api_version == '2021-06-01': + from .v2021_06_01.operations import QueueServicesOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'queue_services'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -578,6 +624,7 @@ def skus(self): * 2021-01-01: :class:`SkusOperations` * 2021-02-01: 
:class:`SkusOperations` * 2021-04-01: :class:`SkusOperations` + * 2021-06-01: :class:`SkusOperations` """ api_version = self._get_api_version('skus') if api_version == '2017-06-01': @@ -604,6 +651,8 @@ def skus(self): from .v2021_02_01.operations import SkusOperations as OperationClass elif api_version == '2021-04-01': from .v2021_04_01.operations import SkusOperations as OperationClass + elif api_version == '2021-06-01': + from .v2021_06_01.operations import SkusOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'skus'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -627,6 +676,7 @@ def storage_accounts(self): * 2021-01-01: :class:`StorageAccountsOperations` * 2021-02-01: :class:`StorageAccountsOperations` * 2021-04-01: :class:`StorageAccountsOperations` + * 2021-06-01: :class:`StorageAccountsOperations` """ api_version = self._get_api_version('storage_accounts') if api_version == '2015-06-15': @@ -659,6 +709,8 @@ def storage_accounts(self): from .v2021_02_01.operations import StorageAccountsOperations as OperationClass elif api_version == '2021-04-01': from .v2021_04_01.operations import StorageAccountsOperations as OperationClass + elif api_version == '2021-06-01': + from .v2021_06_01.operations import StorageAccountsOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'storage_accounts'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -672,6 +724,7 @@ def table(self): * 2021-01-01: :class:`TableOperations` * 2021-02-01: :class:`TableOperations` * 2021-04-01: :class:`TableOperations` + * 2021-06-01: :class:`TableOperations` """ api_version = self._get_api_version('table') if api_version == '2019-06-01': @@ -684,6 +737,8 @@ def table(self): from .v2021_02_01.operations import TableOperations as OperationClass elif api_version == '2021-04-01': from .v2021_04_01.operations import TableOperations as OperationClass + elif api_version == '2021-06-01': + from .v2021_06_01.operations import TableOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'table'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -697,6 +752,7 @@ def table_services(self): * 2021-01-01: :class:`TableServicesOperations` * 2021-02-01: :class:`TableServicesOperations` * 2021-04-01: :class:`TableServicesOperations` + * 2021-06-01: :class:`TableServicesOperations` """ api_version = self._get_api_version('table_services') if api_version == '2019-06-01': @@ -709,6 +765,8 @@ def table_services(self): from .v2021_02_01.operations import TableServicesOperations as OperationClass elif api_version == '2021-04-01': from .v2021_04_01.operations import TableServicesOperations as OperationClass + elif api_version == '2021-06-01': + from .v2021_06_01.operations import TableServicesOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'table_services'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -754,6 +812,7 @@ def usages(self): * 2021-01-01: :class:`UsagesOperations` * 2021-02-01: 
:class:`UsagesOperations` * 2021-04-01: :class:`UsagesOperations` + * 2021-06-01: :class:`UsagesOperations` """ api_version = self._get_api_version('usages') if api_version == '2018-03-01-preview': @@ -774,6 +833,8 @@ def usages(self): from .v2021_02_01.operations import UsagesOperations as OperationClass elif api_version == '2021-04-01': from .v2021_04_01.operations import UsagesOperations as OperationClass + elif api_version == '2021-06-01': + from .v2021_06_01.operations import UsagesOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'usages'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_version.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_version.py index fd82a0d1274f..fb7288a742cb 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_version.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_version.py @@ -5,4 +5,4 @@ # license information. # -------------------------------------------------------------------------- -VERSION = "18.0.0" +VERSION = "19.0.0" diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/_storage_management_client.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/_storage_management_client.py index 2528573c561f..f4381b05dccf 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/_storage_management_client.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/aio/_storage_management_client.py @@ -54,7 +54,7 @@ class StorageManagementClient(MultiApiClientMixin, _SDKClient): :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
""" - DEFAULT_API_VERSION = '2021-04-01' + DEFAULT_API_VERSION = '2021-06-01' _PROFILE_TAG = "azure.mgmt.storage.StorageManagementClient" LATEST_PROFILE = ProfileDefinition({ _PROFILE_TAG: { @@ -105,6 +105,7 @@ def models(cls, api_version=DEFAULT_API_VERSION): * 2021-01-01: :mod:`v2021_01_01.models` * 2021-02-01: :mod:`v2021_02_01.models` * 2021-04-01: :mod:`v2021_04_01.models` + * 2021-06-01: :mod:`v2021_06_01.models` """ if api_version == '2015-06-15': from ..v2015_06_15 import models @@ -151,6 +152,9 @@ def models(cls, api_version=DEFAULT_API_VERSION): elif api_version == '2021-04-01': from ..v2021_04_01 import models return models + elif api_version == '2021-06-01': + from ..v2021_06_01 import models + return models raise ValueError("API version {} is not available".format(api_version)) @property @@ -167,6 +171,7 @@ def blob_containers(self): * 2021-01-01: :class:`BlobContainersOperations` * 2021-02-01: :class:`BlobContainersOperations` * 2021-04-01: :class:`BlobContainersOperations` + * 2021-06-01: :class:`BlobContainersOperations` """ api_version = self._get_api_version('blob_containers') if api_version == '2018-02-01': @@ -189,6 +194,8 @@ def blob_containers(self): from ..v2021_02_01.aio.operations import BlobContainersOperations as OperationClass elif api_version == '2021-04-01': from ..v2021_04_01.aio.operations import BlobContainersOperations as OperationClass + elif api_version == '2021-06-01': + from ..v2021_06_01.aio.operations import BlobContainersOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'blob_containers'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -202,6 +209,7 @@ def blob_inventory_policies(self): * 2021-01-01: :class:`BlobInventoryPoliciesOperations` * 2021-02-01: :class:`BlobInventoryPoliciesOperations` * 2021-04-01: :class:`BlobInventoryPoliciesOperations` + * 2021-06-01: :class:`BlobInventoryPoliciesOperations` """ api_version = self._get_api_version('blob_inventory_policies') if api_version == '2019-06-01': @@ -214,6 +222,8 @@ def blob_inventory_policies(self): from ..v2021_02_01.aio.operations import BlobInventoryPoliciesOperations as OperationClass elif api_version == '2021-04-01': from ..v2021_04_01.aio.operations import BlobInventoryPoliciesOperations as OperationClass + elif api_version == '2021-06-01': + from ..v2021_06_01.aio.operations import BlobInventoryPoliciesOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'blob_inventory_policies'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -230,6 +240,7 @@ def blob_services(self): * 2021-01-01: :class:`BlobServicesOperations` * 2021-02-01: :class:`BlobServicesOperations` * 2021-04-01: :class:`BlobServicesOperations` + * 2021-06-01: :class:`BlobServicesOperations` """ api_version = self._get_api_version('blob_services') if api_version == '2018-07-01': @@ -248,6 +259,8 @@ def blob_services(self): from ..v2021_02_01.aio.operations import BlobServicesOperations as OperationClass elif api_version == '2021-04-01': from ..v2021_04_01.aio.operations import BlobServicesOperations as OperationClass + elif api_version == '2021-06-01': + from ..v2021_06_01.aio.operations import BlobServicesOperations as OperationClass else: raise ValueError("API version {} does not have operation group 
'blob_services'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -260,6 +273,7 @@ def deleted_accounts(self): * 2021-01-01: :class:`DeletedAccountsOperations` * 2021-02-01: :class:`DeletedAccountsOperations` * 2021-04-01: :class:`DeletedAccountsOperations` + * 2021-06-01: :class:`DeletedAccountsOperations` """ api_version = self._get_api_version('deleted_accounts') if api_version == '2020-08-01-preview': @@ -270,6 +284,8 @@ def deleted_accounts(self): from ..v2021_02_01.aio.operations import DeletedAccountsOperations as OperationClass elif api_version == '2021-04-01': from ..v2021_04_01.aio.operations import DeletedAccountsOperations as OperationClass + elif api_version == '2021-06-01': + from ..v2021_06_01.aio.operations import DeletedAccountsOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'deleted_accounts'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -283,6 +299,7 @@ def encryption_scopes(self): * 2021-01-01: :class:`EncryptionScopesOperations` * 2021-02-01: :class:`EncryptionScopesOperations` * 2021-04-01: :class:`EncryptionScopesOperations` + * 2021-06-01: :class:`EncryptionScopesOperations` """ api_version = self._get_api_version('encryption_scopes') if api_version == '2019-06-01': @@ -295,6 +312,8 @@ def encryption_scopes(self): from ..v2021_02_01.aio.operations import EncryptionScopesOperations as OperationClass elif api_version == '2021-04-01': from ..v2021_04_01.aio.operations import EncryptionScopesOperations as OperationClass + elif api_version == '2021-06-01': + from ..v2021_06_01.aio.operations import EncryptionScopesOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'encryption_scopes'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -309,6 +328,7 @@ def file_services(self): * 2021-01-01: :class:`FileServicesOperations` * 2021-02-01: :class:`FileServicesOperations` * 2021-04-01: :class:`FileServicesOperations` + * 2021-06-01: :class:`FileServicesOperations` """ api_version = self._get_api_version('file_services') if api_version == '2019-04-01': @@ -323,6 +343,8 @@ def file_services(self): from ..v2021_02_01.aio.operations import FileServicesOperations as OperationClass elif api_version == '2021-04-01': from ..v2021_04_01.aio.operations import FileServicesOperations as OperationClass + elif api_version == '2021-06-01': + from ..v2021_06_01.aio.operations import FileServicesOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'file_services'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -337,6 +359,7 @@ def file_shares(self): * 2021-01-01: :class:`FileSharesOperations` * 2021-02-01: :class:`FileSharesOperations` * 2021-04-01: :class:`FileSharesOperations` + * 2021-06-01: :class:`FileSharesOperations` """ api_version = self._get_api_version('file_shares') if api_version == '2019-04-01': @@ -351,6 +374,8 @@ def file_shares(self): from ..v2021_02_01.aio.operations import FileSharesOperations as OperationClass elif api_version == '2021-04-01': from ..v2021_04_01.aio.operations 
import FileSharesOperations as OperationClass + elif api_version == '2021-06-01': + from ..v2021_06_01.aio.operations import FileSharesOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'file_shares'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -367,6 +392,7 @@ def management_policies(self): * 2021-01-01: :class:`ManagementPoliciesOperations` * 2021-02-01: :class:`ManagementPoliciesOperations` * 2021-04-01: :class:`ManagementPoliciesOperations` + * 2021-06-01: :class:`ManagementPoliciesOperations` """ api_version = self._get_api_version('management_policies') if api_version == '2018-07-01': @@ -385,6 +411,8 @@ def management_policies(self): from ..v2021_02_01.aio.operations import ManagementPoliciesOperations as OperationClass elif api_version == '2021-04-01': from ..v2021_04_01.aio.operations import ManagementPoliciesOperations as OperationClass + elif api_version == '2021-06-01': + from ..v2021_06_01.aio.operations import ManagementPoliciesOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'management_policies'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -398,6 +426,7 @@ def object_replication_policies(self): * 2021-01-01: :class:`ObjectReplicationPoliciesOperations` * 2021-02-01: :class:`ObjectReplicationPoliciesOperations` * 2021-04-01: :class:`ObjectReplicationPoliciesOperations` + * 2021-06-01: :class:`ObjectReplicationPoliciesOperations` """ api_version = self._get_api_version('object_replication_policies') if api_version == '2019-06-01': @@ -410,6 +439,8 @@ def object_replication_policies(self): from ..v2021_02_01.aio.operations import ObjectReplicationPoliciesOperations as OperationClass elif api_version == '2021-04-01': from ..v2021_04_01.aio.operations import ObjectReplicationPoliciesOperations as OperationClass + elif api_version == '2021-06-01': + from ..v2021_06_01.aio.operations import ObjectReplicationPoliciesOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'object_replication_policies'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -430,6 +461,7 @@ def operations(self): * 2021-01-01: :class:`Operations` * 2021-02-01: :class:`Operations` * 2021-04-01: :class:`Operations` + * 2021-06-01: :class:`Operations` """ api_version = self._get_api_version('operations') if api_version == '2017-06-01': @@ -456,6 +488,8 @@ def operations(self): from ..v2021_02_01.aio.operations import Operations as OperationClass elif api_version == '2021-04-01': from ..v2021_04_01.aio.operations import Operations as OperationClass + elif api_version == '2021-06-01': + from ..v2021_06_01.aio.operations import Operations as OperationClass else: raise ValueError("API version {} does not have operation group 'operations'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -469,6 +503,7 @@ def private_endpoint_connections(self): * 2021-01-01: :class:`PrivateEndpointConnectionsOperations` * 2021-02-01: :class:`PrivateEndpointConnectionsOperations` * 2021-04-01: 
:class:`PrivateEndpointConnectionsOperations` + * 2021-06-01: :class:`PrivateEndpointConnectionsOperations` """ api_version = self._get_api_version('private_endpoint_connections') if api_version == '2019-06-01': @@ -481,6 +516,8 @@ def private_endpoint_connections(self): from ..v2021_02_01.aio.operations import PrivateEndpointConnectionsOperations as OperationClass elif api_version == '2021-04-01': from ..v2021_04_01.aio.operations import PrivateEndpointConnectionsOperations as OperationClass + elif api_version == '2021-06-01': + from ..v2021_06_01.aio.operations import PrivateEndpointConnectionsOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'private_endpoint_connections'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -494,6 +531,7 @@ def private_link_resources(self): * 2021-01-01: :class:`PrivateLinkResourcesOperations` * 2021-02-01: :class:`PrivateLinkResourcesOperations` * 2021-04-01: :class:`PrivateLinkResourcesOperations` + * 2021-06-01: :class:`PrivateLinkResourcesOperations` """ api_version = self._get_api_version('private_link_resources') if api_version == '2019-06-01': @@ -506,6 +544,8 @@ def private_link_resources(self): from ..v2021_02_01.aio.operations import PrivateLinkResourcesOperations as OperationClass elif api_version == '2021-04-01': from ..v2021_04_01.aio.operations import PrivateLinkResourcesOperations as OperationClass + elif api_version == '2021-06-01': + from ..v2021_06_01.aio.operations import PrivateLinkResourcesOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'private_link_resources'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -519,6 +559,7 @@ def queue(self): * 2021-01-01: :class:`QueueOperations` * 2021-02-01: :class:`QueueOperations` * 2021-04-01: :class:`QueueOperations` + * 2021-06-01: :class:`QueueOperations` """ api_version = self._get_api_version('queue') if api_version == '2019-06-01': @@ -531,6 +572,8 @@ def queue(self): from ..v2021_02_01.aio.operations import QueueOperations as OperationClass elif api_version == '2021-04-01': from ..v2021_04_01.aio.operations import QueueOperations as OperationClass + elif api_version == '2021-06-01': + from ..v2021_06_01.aio.operations import QueueOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'queue'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -544,6 +587,7 @@ def queue_services(self): * 2021-01-01: :class:`QueueServicesOperations` * 2021-02-01: :class:`QueueServicesOperations` * 2021-04-01: :class:`QueueServicesOperations` + * 2021-06-01: :class:`QueueServicesOperations` """ api_version = self._get_api_version('queue_services') if api_version == '2019-06-01': @@ -556,6 +600,8 @@ def queue_services(self): from ..v2021_02_01.aio.operations import QueueServicesOperations as OperationClass elif api_version == '2021-04-01': from ..v2021_04_01.aio.operations import QueueServicesOperations as OperationClass + elif api_version == '2021-06-01': + from ..v2021_06_01.aio.operations import QueueServicesOperations as OperationClass else: raise ValueError("API version {} does not have operation group 
'queue_services'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -576,6 +622,7 @@ def skus(self): * 2021-01-01: :class:`SkusOperations` * 2021-02-01: :class:`SkusOperations` * 2021-04-01: :class:`SkusOperations` + * 2021-06-01: :class:`SkusOperations` """ api_version = self._get_api_version('skus') if api_version == '2017-06-01': @@ -602,6 +649,8 @@ def skus(self): from ..v2021_02_01.aio.operations import SkusOperations as OperationClass elif api_version == '2021-04-01': from ..v2021_04_01.aio.operations import SkusOperations as OperationClass + elif api_version == '2021-06-01': + from ..v2021_06_01.aio.operations import SkusOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'skus'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -625,6 +674,7 @@ def storage_accounts(self): * 2021-01-01: :class:`StorageAccountsOperations` * 2021-02-01: :class:`StorageAccountsOperations` * 2021-04-01: :class:`StorageAccountsOperations` + * 2021-06-01: :class:`StorageAccountsOperations` """ api_version = self._get_api_version('storage_accounts') if api_version == '2015-06-15': @@ -657,6 +707,8 @@ def storage_accounts(self): from ..v2021_02_01.aio.operations import StorageAccountsOperations as OperationClass elif api_version == '2021-04-01': from ..v2021_04_01.aio.operations import StorageAccountsOperations as OperationClass + elif api_version == '2021-06-01': + from ..v2021_06_01.aio.operations import StorageAccountsOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'storage_accounts'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -670,6 +722,7 @@ def table(self): * 2021-01-01: :class:`TableOperations` * 2021-02-01: :class:`TableOperations` * 2021-04-01: :class:`TableOperations` + * 2021-06-01: :class:`TableOperations` """ api_version = self._get_api_version('table') if api_version == '2019-06-01': @@ -682,6 +735,8 @@ def table(self): from ..v2021_02_01.aio.operations import TableOperations as OperationClass elif api_version == '2021-04-01': from ..v2021_04_01.aio.operations import TableOperations as OperationClass + elif api_version == '2021-06-01': + from ..v2021_06_01.aio.operations import TableOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'table'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -695,6 +750,7 @@ def table_services(self): * 2021-01-01: :class:`TableServicesOperations` * 2021-02-01: :class:`TableServicesOperations` * 2021-04-01: :class:`TableServicesOperations` + * 2021-06-01: :class:`TableServicesOperations` """ api_version = self._get_api_version('table_services') if api_version == '2019-06-01': @@ -707,6 +763,8 @@ def table_services(self): from ..v2021_02_01.aio.operations import TableServicesOperations as OperationClass elif api_version == '2021-04-01': from ..v2021_04_01.aio.operations import TableServicesOperations as OperationClass + elif api_version == '2021-06-01': + from ..v2021_06_01.aio.operations import TableServicesOperations as OperationClass else: raise ValueError("API 
version {} does not have operation group 'table_services'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) @@ -752,6 +810,7 @@ def usages(self): * 2021-01-01: :class:`UsagesOperations` * 2021-02-01: :class:`UsagesOperations` * 2021-04-01: :class:`UsagesOperations` + * 2021-06-01: :class:`UsagesOperations` """ api_version = self._get_api_version('usages') if api_version == '2018-03-01-preview': @@ -772,6 +831,8 @@ def usages(self): from ..v2021_02_01.aio.operations import UsagesOperations as OperationClass elif api_version == '2021-04-01': from ..v2021_04_01.aio.operations import UsagesOperations as OperationClass + elif api_version == '2021-06-01': + from ..v2021_06_01.aio.operations import UsagesOperations as OperationClass else: raise ValueError("API version {} does not have operation group 'usages'".format(api_version)) return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/models.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/models.py index 4b99fd086598..60a2d2a8e47e 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/models.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/models.py @@ -5,4 +5,4 @@ # license information. # -------------------------------------------------------------------------- from .v2018_02_01.models import * -from .v2021_04_01.models import * +from .v2021_06_01.models import * diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2015_06_15/_version.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2015_06_15/_version.py index 8dae91701610..232662316d4d 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2015_06_15/_version.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2015_06_15/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "18.0.0" +VERSION = "19.0.0" diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2015_06_15/aio/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2015_06_15/aio/operations/_storage_accounts_operations.py index b7e1145336f8..1f268c402268 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2015_06_15/aio/operations/_storage_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2015_06_15/aio/operations/_storage_accounts_operations.py @@ -46,7 +46,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: async def check_name_availability( self, account_name: "_models.StorageAccountCheckNameAvailabilityParameters", - **kwargs + **kwargs: Any ) -> "_models.CheckNameAvailabilityResult": """Checks that the storage account name is valid and is not already in use. 
@@ -108,7 +108,7 @@ async def _create_initial( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountCreateParameters", - **kwargs + **kwargs: Any ) -> Optional["_models.StorageAccount"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.StorageAccount"]] error_map = { @@ -163,7 +163,7 @@ async def begin_create( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountCreateParameters", - **kwargs + **kwargs: Any ) -> AsyncLROPoller["_models.StorageAccount"]: """Asynchronously creates a new storage account with the specified parameters. If an account is already created and a subsequent create request is issued with different properties, the @@ -181,8 +181,8 @@ async def begin_create( :type parameters: ~azure.mgmt.storage.v2015_06_15.models.StorageAccountCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either StorageAccount or the result of cls(response) @@ -239,7 +239,7 @@ async def delete( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes a storage account in Microsoft Azure. @@ -295,7 +295,7 @@ async def get_properties( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.StorageAccount": """Returns the properties for the specified storage account including but not limited to name, SKU name, location, and account status. The ListKeys operation should be used to retrieve storage @@ -359,7 +359,7 @@ async def update( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountUpdateParameters", - **kwargs + **kwargs: Any ) -> "_models.StorageAccount": """The update operation can be used to update the SKU, encryption, access tier, or tags for a storage account. It can also be used to map the account to a custom domain. Only one custom @@ -432,7 +432,7 @@ async def update( def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageAccountListResult"]: """Lists all the storage accounts available under the subscription. Note that storage keys are not returned; use the ListKeys operation for this. @@ -500,7 +500,7 @@ async def get_next(next_link=None): def list_by_resource_group( self, resource_group_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageAccountListResult"]: """Lists all the storage accounts available under the given resource group. Note that storage keys are not returned; use the ListKeys operation for this. @@ -573,7 +573,7 @@ async def list_keys( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.StorageAccountKeys": """Lists the access keys for the specified storage account. 
@@ -635,7 +635,7 @@ async def regenerate_key( resource_group_name: str, account_name: str, regenerate_key: "_models.StorageAccountRegenerateKeyParameters", - **kwargs + **kwargs: Any ) -> "_models.StorageAccountKeys": """Regenerates one of the access keys for the specified storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2015_06_15/aio/operations/_usage_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2015_06_15/aio/operations/_usage_operations.py index 29594b6d85dc..d9f8fdb6dbcc 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2015_06_15/aio/operations/_usage_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2015_06_15/aio/operations/_usage_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.UsageListResult"]: """Lists the current usage count and the limit for the resources under the subscription. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2015_06_15/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2015_06_15/operations/_storage_accounts_operations.py index c187f5445f0d..a3cfe7a86128 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2015_06_15/operations/_storage_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2015_06_15/operations/_storage_accounts_operations.py @@ -188,8 +188,8 @@ def begin_create( :type parameters: ~azure.mgmt.storage.v2015_06_15.models.StorageAccountCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the ARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either StorageAccount or the result of cls(response) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_01_01/_version.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_01_01/_version.py index 8dae91701610..232662316d4d 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_01_01/_version.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_01_01/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -VERSION = "18.0.0" +VERSION = "19.0.0" diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_01_01/aio/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_01_01/aio/operations/_storage_accounts_operations.py index 60778dfc2ffa..7faa9b1117b9 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_01_01/aio/operations/_storage_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_01_01/aio/operations/_storage_accounts_operations.py @@ -46,7 +46,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: async def check_name_availability( self, account_name: "_models.StorageAccountCheckNameAvailabilityParameters", - **kwargs + **kwargs: Any ) -> "_models.CheckNameAvailabilityResult": """Checks that the storage account name is valid and is not already in use. @@ -108,7 +108,7 @@ async def _create_initial( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountCreateParameters", - **kwargs + **kwargs: Any ) -> Optional["_models.StorageAccount"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.StorageAccount"]] error_map = { @@ -163,7 +163,7 @@ async def begin_create( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountCreateParameters", - **kwargs + **kwargs: Any ) -> AsyncLROPoller["_models.StorageAccount"]: """Asynchronously creates a new storage account with the specified parameters. If an account is already created and a subsequent create request is issued with different properties, the @@ -180,8 +180,8 @@ async def begin_create( :type parameters: ~azure.mgmt.storage.v2016_01_01.models.StorageAccountCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either StorageAccount or the result of cls(response) @@ -238,7 +238,7 @@ async def delete( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes a storage account in Microsoft Azure. @@ -293,7 +293,7 @@ async def get_properties( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.StorageAccount": """Returns the properties for the specified storage account including but not limited to name, SKU name, location, and account status. The ListKeys operation should be used to retrieve storage @@ -356,7 +356,7 @@ async def update( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountUpdateParameters", - **kwargs + **kwargs: Any ) -> "_models.StorageAccount": """The update operation can be used to update the SKU, encryption, access tier, or tags for a storage account. 
It can also be used to map the account to a custom domain. Only one custom @@ -428,7 +428,7 @@ async def update( def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageAccountListResult"]: """Lists all the storage accounts available under the subscription. Note that storage keys are not returned; use the ListKeys operation for this. @@ -496,7 +496,7 @@ async def get_next(next_link=None): def list_by_resource_group( self, resource_group_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageAccountListResult"]: """Lists all the storage accounts available under the given resource group. Note that storage keys are not returned; use the ListKeys operation for this. @@ -568,7 +568,7 @@ async def list_keys( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.StorageAccountListKeysResult": """Lists the access keys for the specified storage account. @@ -629,7 +629,7 @@ async def regenerate_key( resource_group_name: str, account_name: str, regenerate_key: "_models.StorageAccountRegenerateKeyParameters", - **kwargs + **kwargs: Any ) -> "_models.StorageAccountListKeysResult": """Regenerates one of the access keys for the specified storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_01_01/aio/operations/_usage_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_01_01/aio/operations/_usage_operations.py index 52dc9615d6e4..a7cb247a208a 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_01_01/aio/operations/_usage_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_01_01/aio/operations/_usage_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.UsageListResult"]: """Gets the current usage count and the limit for the resources under the subscription. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_01_01/models/_models.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_01_01/models/_models.py index 78d74c7d009d..26863c8864f1 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_01_01/models/_models.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_01_01/models/_models.py @@ -88,8 +88,8 @@ class Encryption(msrest.serialization.Model): :param services: List of services which support encryption. :type services: ~azure.mgmt.storage.v2016_01_01.models.EncryptionServices - :ivar key_source: Required. The encryption keySource (provider). Possible values - (case-insensitive): Microsoft.Storage. Default value: "Microsoft.Storage". + :ivar key_source: The encryption keySource (provider). Possible values (case-insensitive): + Microsoft.Storage. Has constant value: "Microsoft.Storage". :vartype key_source: str """ @@ -414,7 +414,7 @@ class StorageAccountCheckNameAvailabilityParameters(msrest.serialization.Model): :param name: Required. :type name: str - :ivar type: Required. Default value: "Microsoft.Storage/storageAccounts". + :ivar type: Has constant value: "Microsoft.Storage/storageAccounts". 
:vartype type: str """ diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_01_01/models/_models_py3.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_01_01/models/_models_py3.py index 045c2e649c84..955e9e189d80 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_01_01/models/_models_py3.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_01_01/models/_models_py3.py @@ -95,8 +95,8 @@ class Encryption(msrest.serialization.Model): :param services: List of services which support encryption. :type services: ~azure.mgmt.storage.v2016_01_01.models.EncryptionServices - :ivar key_source: Required. The encryption keySource (provider). Possible values - (case-insensitive): Microsoft.Storage. Default value: "Microsoft.Storage". + :ivar key_source: The encryption keySource (provider). Possible values (case-insensitive): + Microsoft.Storage. Has constant value: "Microsoft.Storage". :vartype key_source: str """ @@ -435,7 +435,7 @@ class StorageAccountCheckNameAvailabilityParameters(msrest.serialization.Model): :param name: Required. :type name: str - :ivar type: Required. Default value: "Microsoft.Storage/storageAccounts". + :ivar type: Has constant value: "Microsoft.Storage/storageAccounts". :vartype type: str """ diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_01_01/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_01_01/operations/_storage_accounts_operations.py index 2d4399ca813e..32e8240c5961 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_01_01/operations/_storage_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_01_01/operations/_storage_accounts_operations.py @@ -187,8 +187,8 @@ def begin_create( :type parameters: ~azure.mgmt.storage.v2016_01_01.models.StorageAccountCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the ARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either StorageAccount or the result of cls(response) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_12_01/_version.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_12_01/_version.py index 8dae91701610..232662316d4d 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_12_01/_version.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_12_01/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -VERSION = "18.0.0" +VERSION = "19.0.0" diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_12_01/aio/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_12_01/aio/operations/_storage_accounts_operations.py index 2fb22b4bbd20..c8d9d3971a37 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_12_01/aio/operations/_storage_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_12_01/aio/operations/_storage_accounts_operations.py @@ -46,7 +46,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: async def check_name_availability( self, account_name: "_models.StorageAccountCheckNameAvailabilityParameters", - **kwargs + **kwargs: Any ) -> "_models.CheckNameAvailabilityResult": """Checks that the storage account name is valid and is not already in use. @@ -108,7 +108,7 @@ async def _create_initial( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountCreateParameters", - **kwargs + **kwargs: Any ) -> Optional["_models.StorageAccount"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.StorageAccount"]] error_map = { @@ -163,7 +163,7 @@ async def begin_create( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountCreateParameters", - **kwargs + **kwargs: Any ) -> AsyncLROPoller["_models.StorageAccount"]: """Asynchronously creates a new storage account with the specified parameters. If an account is already created and a subsequent create request is issued with different properties, the @@ -181,8 +181,8 @@ async def begin_create( :type parameters: ~azure.mgmt.storage.v2016_12_01.models.StorageAccountCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either StorageAccount or the result of cls(response) @@ -239,7 +239,7 @@ async def delete( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes a storage account in Microsoft Azure. @@ -295,7 +295,7 @@ async def get_properties( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.StorageAccount": """Returns the properties for the specified storage account including but not limited to name, SKU name, location, and account status. The ListKeys operation should be used to retrieve storage @@ -359,7 +359,7 @@ async def update( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountUpdateParameters", - **kwargs + **kwargs: Any ) -> "_models.StorageAccount": """The update operation can be used to update the SKU, encryption, access tier, or tags for a storage account. 
It can also be used to map the account to a custom domain. Only one custom @@ -432,7 +432,7 @@ async def update( def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageAccountListResult"]: """Lists all the storage accounts available under the subscription. Note that storage keys are not returned; use the ListKeys operation for this. @@ -500,7 +500,7 @@ async def get_next(next_link=None): def list_by_resource_group( self, resource_group_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageAccountListResult"]: """Lists all the storage accounts available under the given resource group. Note that storage keys are not returned; use the ListKeys operation for this. @@ -573,7 +573,7 @@ async def list_keys( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.StorageAccountListKeysResult": """Lists the access keys for the specified storage account. @@ -635,7 +635,7 @@ async def regenerate_key( resource_group_name: str, account_name: str, regenerate_key: "_models.StorageAccountRegenerateKeyParameters", - **kwargs + **kwargs: Any ) -> "_models.StorageAccountListKeysResult": """Regenerates one of the access keys for the specified storage account. @@ -704,7 +704,7 @@ async def list_account_sas( resource_group_name: str, account_name: str, parameters: "_models.AccountSasParameters", - **kwargs + **kwargs: Any ) -> "_models.ListAccountSasResponse": """List SAS credentials of a storage account. @@ -773,7 +773,7 @@ async def list_service_sas( resource_group_name: str, account_name: str, parameters: "_models.ServiceSasParameters", - **kwargs + **kwargs: Any ) -> "_models.ListServiceSasResponse": """List service SAS credentials of a specific resource. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_12_01/aio/operations/_usage_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_12_01/aio/operations/_usage_operations.py index 0095b14fb85e..f86af2a94548 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_12_01/aio/operations/_usage_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_12_01/aio/operations/_usage_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.UsageListResult"]: """Gets the current usage count and the limit for the resources under the subscription. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_12_01/models/_models.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_12_01/models/_models.py index 9ee059aae9f9..ccb022c84cda 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_12_01/models/_models.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_12_01/models/_models.py @@ -156,8 +156,8 @@ class Encryption(msrest.serialization.Model): :param services: List of services which support encryption. :type services: ~azure.mgmt.storage.v2016_12_01.models.EncryptionServices - :ivar key_source: Required. The encryption keySource (provider). Possible values - (case-insensitive): Microsoft.Storage. Default value: "Microsoft.Storage". + :ivar key_source: The encryption keySource (provider). Possible values (case-insensitive): + Microsoft.Storage. Has constant value: "Microsoft.Storage". :vartype key_source: str """ @@ -658,7 +658,7 @@ class StorageAccountCheckNameAvailabilityParameters(msrest.serialization.Model): :param name: Required. 
:type name: str - :ivar type: Required. Default value: "Microsoft.Storage/storageAccounts". + :ivar type: Has constant value: "Microsoft.Storage/storageAccounts". :vartype type: str """ diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_12_01/models/_models_py3.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_12_01/models/_models_py3.py index 1b2617d1bfe7..978707220a8d 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_12_01/models/_models_py3.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_12_01/models/_models_py3.py @@ -173,8 +173,8 @@ class Encryption(msrest.serialization.Model): :param services: List of services which support encryption. :type services: ~azure.mgmt.storage.v2016_12_01.models.EncryptionServices - :ivar key_source: Required. The encryption keySource (provider). Possible values - (case-insensitive): Microsoft.Storage. Default value: "Microsoft.Storage". + :ivar key_source: The encryption keySource (provider). Possible values (case-insensitive): + Microsoft.Storage. Has constant value: "Microsoft.Storage". :vartype key_source: str """ @@ -710,7 +710,7 @@ class StorageAccountCheckNameAvailabilityParameters(msrest.serialization.Model): :param name: Required. :type name: str - :ivar type: Required. Default value: "Microsoft.Storage/storageAccounts". + :ivar type: Has constant value: "Microsoft.Storage/storageAccounts". :vartype type: str """ diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_12_01/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_12_01/operations/_storage_accounts_operations.py index 710f809c21a5..5d6595502c7e 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_12_01/operations/_storage_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_12_01/operations/_storage_accounts_operations.py @@ -188,8 +188,8 @@ def begin_create( :type parameters: ~azure.mgmt.storage.v2016_12_01.models.StorageAccountCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the ARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either StorageAccount or the result of cls(response) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/_version.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/_version.py index 8dae91701610..232662316d4d 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/_version.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -VERSION = "18.0.0" +VERSION = "19.0.0" diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/aio/operations/_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/aio/operations/_operations.py index 1fe7969577bd..efe606dcfaed 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/aio/operations/_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/aio/operations/_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.OperationListResult"]: """Lists all of the available Storage Rest API operations. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/aio/operations/_skus_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/aio/operations/_skus_operations.py index 5fd123a5b611..cc0aa50c9fe0 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/aio/operations/_skus_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/aio/operations/_skus_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageSkuListResult"]: """Lists the available SKUs supported by Microsoft.Storage for given subscription. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/aio/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/aio/operations/_storage_accounts_operations.py index b71bf71ea7e5..64d32bee43d8 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/aio/operations/_storage_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/aio/operations/_storage_accounts_operations.py @@ -46,7 +46,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: async def check_name_availability( self, account_name: "_models.StorageAccountCheckNameAvailabilityParameters", - **kwargs + **kwargs: Any ) -> "_models.CheckNameAvailabilityResult": """Checks that the storage account name is valid and is not already in use. @@ -108,7 +108,7 @@ async def _create_initial( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountCreateParameters", - **kwargs + **kwargs: Any ) -> Optional["_models.StorageAccount"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.StorageAccount"]] error_map = { @@ -163,7 +163,7 @@ async def begin_create( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountCreateParameters", - **kwargs + **kwargs: Any ) -> AsyncLROPoller["_models.StorageAccount"]: """Asynchronously creates a new storage account with the specified parameters. If an account is already created and a subsequent create request is issued with different properties, the @@ -181,8 +181,8 @@ async def begin_create( :type parameters: ~azure.mgmt.storage.v2017_06_01.models.StorageAccountCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either StorageAccount or the result of cls(response) @@ -239,7 +239,7 @@ async def delete( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes a storage account in Microsoft Azure. @@ -295,7 +295,7 @@ async def get_properties( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.StorageAccount": """Returns the properties for the specified storage account including but not limited to name, SKU name, location, and account status. The ListKeys operation should be used to retrieve storage @@ -359,7 +359,7 @@ async def update( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountUpdateParameters", - **kwargs + **kwargs: Any ) -> "_models.StorageAccount": """The update operation can be used to update the SKU, encryption, access tier, or tags for a storage account. It can also be used to map the account to a custom domain. Only one custom @@ -432,7 +432,7 @@ async def update( def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageAccountListResult"]: """Lists all the storage accounts available under the subscription. Note that storage keys are not returned; use the ListKeys operation for this. @@ -500,7 +500,7 @@ async def get_next(next_link=None): def list_by_resource_group( self, resource_group_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageAccountListResult"]: """Lists all the storage accounts available under the given resource group. Note that storage keys are not returned; use the ListKeys operation for this. @@ -573,7 +573,7 @@ async def list_keys( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.StorageAccountListKeysResult": """Lists the access keys for the specified storage account. @@ -635,7 +635,7 @@ async def regenerate_key( resource_group_name: str, account_name: str, regenerate_key: "_models.StorageAccountRegenerateKeyParameters", - **kwargs + **kwargs: Any ) -> "_models.StorageAccountListKeysResult": """Regenerates one of the access keys for the specified storage account. @@ -704,7 +704,7 @@ async def list_account_sas( resource_group_name: str, account_name: str, parameters: "_models.AccountSasParameters", - **kwargs + **kwargs: Any ) -> "_models.ListAccountSasResponse": """List SAS credentials of a storage account. @@ -773,7 +773,7 @@ async def list_service_sas( resource_group_name: str, account_name: str, parameters: "_models.ServiceSasParameters", - **kwargs + **kwargs: Any ) -> "_models.ListServiceSasResponse": """List service SAS credentials of a specific resource. 
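The reworded `polling` docstrings above describe existing behavior rather than change it: `begin_create` still defaults to ARMPolling (AsyncARMPolling on the async client), `polling=False` disables polling, and a pre-initialized polling object can be supplied for a custom strategy. A minimal sketch of the synchronous call follows; the subscription ID, resource group, and account name are placeholder assumptions.

```py
from azure.identity import DefaultAzureCredential
from azure.mgmt.storage import StorageManagementClient
from azure.mgmt.storage.models import Sku, StorageAccountCreateParameters

# Placeholder subscription and resource names -- substitute your own.
client = StorageManagementClient(DefaultAzureCredential(), "<subscription-id>")

poller = client.storage_accounts.begin_create(
    "my-resource-group",
    "mystorageaccount",
    StorageAccountCreateParameters(
        sku=Sku(name="Standard_LRS"),
        kind="StorageV2",
        location="westus",
    ),
    # polling defaults to ARMPolling; pass False to skip polling, or pass an
    # initialized azure.core.polling.PollingMethod for a custom strategy.
    polling=False,
)
result = poller.result()  # returns immediately when polling is disabled
```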
diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/aio/operations/_usage_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/aio/operations/_usage_operations.py index 61882126fc10..f4a58fed7514 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/aio/operations/_usage_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/aio/operations/_usage_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.UsageListResult"]: """Gets the current usage count and the limit for the resources under the subscription. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/models/_models.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/models/_models.py index 58c4e203ed66..d48a55e487cd 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/models/_models.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/models/_models.py @@ -323,7 +323,7 @@ class Identity(msrest.serialization.Model): :vartype principal_id: str :ivar tenant_id: The tenant ID of resource. :vartype tenant_id: str - :ivar type: Required. The identity type. Default value: "SystemAssigned". + :ivar type: The identity type. Has constant value: "SystemAssigned". :vartype type: str """ @@ -353,20 +353,18 @@ def __init__( class IPRule(msrest.serialization.Model): """IP rule with specific IP or IP range in CIDR format. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param ip_address_or_range: Required. Specifies the IP or IP range in CIDR format. Only IPV4 address is allowed. :type ip_address_or_range: str - :ivar action: The action of IP ACL rule. Default value: "Allow". - :vartype action: str + :param action: The action of IP ACL rule. The only acceptable values to pass in are None and + "Allow". The default value is None. + :type action: str """ _validation = { 'ip_address_or_range': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -374,14 +372,13 @@ class IPRule(msrest.serialization.Model): 'action': {'key': 'action', 'type': 'str'}, } - action = "Allow" - def __init__( self, **kwargs ): super(IPRule, self).__init__(**kwargs) self.ip_address_or_range = kwargs['ip_address_or_range'] + self.action = kwargs.get('action', None) class KeyVaultProperties(msrest.serialization.Model): @@ -1073,7 +1070,7 @@ class StorageAccountCheckNameAvailabilityParameters(msrest.serialization.Model): :param name: Required. The storage account name. :type name: str - :ivar type: Required. The type of resource, Microsoft.Storage/storageAccounts. Default value: + :ivar type: The type of resource, Microsoft.Storage/storageAccounts. Has constant value: "Microsoft.Storage/storageAccounts". :vartype type: str """ @@ -1458,15 +1455,14 @@ def __init__( class VirtualNetworkRule(msrest.serialization.Model): """Virtual Network rule. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param virtual_network_resource_id: Required. Resource ID of a subnet, for example: /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}. 
:type virtual_network_resource_id: str - :ivar action: The action of virtual network rule. Default value: "Allow". - :vartype action: str + :param action: The action of virtual network rule. The only acceptable values to pass in are + None and "Allow". The default value is None. + :type action: str :param state: Gets the state of virtual network rule. Possible values include: "provisioning", "deprovisioning", "succeeded", "failed", "networkSourceDeleted". :type state: str or ~azure.mgmt.storage.v2017_06_01.models.State @@ -1474,7 +1470,6 @@ class VirtualNetworkRule(msrest.serialization.Model): _validation = { 'virtual_network_resource_id': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -1483,12 +1478,11 @@ class VirtualNetworkRule(msrest.serialization.Model): 'state': {'key': 'state', 'type': 'str'}, } - action = "Allow" - def __init__( self, **kwargs ): super(VirtualNetworkRule, self).__init__(**kwargs) self.virtual_network_resource_id = kwargs['virtual_network_resource_id'] + self.action = kwargs.get('action', None) self.state = kwargs.get('state', None) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/models/_models_py3.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/models/_models_py3.py index 3071a40a24f0..1fbc5e0de664 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/models/_models_py3.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/models/_models_py3.py @@ -352,7 +352,7 @@ class Identity(msrest.serialization.Model): :vartype principal_id: str :ivar tenant_id: The tenant ID of resource. :vartype tenant_id: str - :ivar type: Required. The identity type. Default value: "SystemAssigned". + :ivar type: The identity type. Has constant value: "SystemAssigned". :vartype type: str """ @@ -382,20 +382,18 @@ def __init__( class IPRule(msrest.serialization.Model): """IP rule with specific IP or IP range in CIDR format. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param ip_address_or_range: Required. Specifies the IP or IP range in CIDR format. Only IPV4 address is allowed. :type ip_address_or_range: str - :ivar action: The action of IP ACL rule. Default value: "Allow". - :vartype action: str + :param action: The action of IP ACL rule. The only acceptable values to pass in are None and + "Allow". The default value is None. + :type action: str """ _validation = { 'ip_address_or_range': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -403,16 +401,16 @@ class IPRule(msrest.serialization.Model): 'action': {'key': 'action', 'type': 'str'}, } - action = "Allow" - def __init__( self, *, ip_address_or_range: str, + action: Optional[str] = None, **kwargs ): super(IPRule, self).__init__(**kwargs) self.ip_address_or_range = ip_address_or_range + self.action = action class KeyVaultProperties(msrest.serialization.Model): @@ -1168,7 +1166,7 @@ class StorageAccountCheckNameAvailabilityParameters(msrest.serialization.Model): :param name: Required. The storage account name. :type name: str - :ivar type: Required. The type of resource, Microsoft.Storage/storageAccounts. Default value: + :ivar type: The type of resource, Microsoft.Storage/storageAccounts. Has constant value: "Microsoft.Storage/storageAccounts". :vartype type: str """ @@ -1579,15 +1577,14 @@ def __init__( class VirtualNetworkRule(msrest.serialization.Model): """Virtual Network rule. 
- Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param virtual_network_resource_id: Required. Resource ID of a subnet, for example: /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}. :type virtual_network_resource_id: str - :ivar action: The action of virtual network rule. Default value: "Allow". - :vartype action: str + :param action: The action of virtual network rule. The only acceptable values to pass in are + None and "Allow". The default value is None. + :type action: str :param state: Gets the state of virtual network rule. Possible values include: "provisioning", "deprovisioning", "succeeded", "failed", "networkSourceDeleted". :type state: str or ~azure.mgmt.storage.v2017_06_01.models.State @@ -1595,7 +1592,6 @@ class VirtualNetworkRule(msrest.serialization.Model): _validation = { 'virtual_network_resource_id': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -1604,15 +1600,15 @@ class VirtualNetworkRule(msrest.serialization.Model): 'state': {'key': 'state', 'type': 'str'}, } - action = "Allow" - def __init__( self, *, virtual_network_resource_id: str, + action: Optional[str] = None, state: Optional[Union[str, "State"]] = None, **kwargs ): super(VirtualNetworkRule, self).__init__(**kwargs) self.virtual_network_resource_id = virtual_network_resource_id + self.action = action self.state = state diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/operations/_storage_accounts_operations.py index eded46aeae90..ac8b8d149fcc 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/operations/_storage_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/operations/_storage_accounts_operations.py @@ -188,8 +188,8 @@ def begin_create( :type parameters: ~azure.mgmt.storage.v2017_06_01.models.StorageAccountCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the ARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either StorageAccount or the result of cls(response) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_10_01/_version.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_10_01/_version.py index 8dae91701610..232662316d4d 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_10_01/_version.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_10_01/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -VERSION = "18.0.0" +VERSION = "19.0.0" diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_10_01/aio/operations/_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_10_01/aio/operations/_operations.py index 9fc993a2392a..b4c1535f7db2 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_10_01/aio/operations/_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_10_01/aio/operations/_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.OperationListResult"]: """Lists all of the available Storage Rest API operations. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_10_01/aio/operations/_skus_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_10_01/aio/operations/_skus_operations.py index daab3859df0f..da4d4780c46d 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_10_01/aio/operations/_skus_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_10_01/aio/operations/_skus_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageSkuListResult"]: """Lists the available SKUs supported by Microsoft.Storage for given subscription. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_10_01/aio/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_10_01/aio/operations/_storage_accounts_operations.py index 32ab52758329..f7436de7de95 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_10_01/aio/operations/_storage_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_10_01/aio/operations/_storage_accounts_operations.py @@ -46,7 +46,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: async def check_name_availability( self, account_name: "_models.StorageAccountCheckNameAvailabilityParameters", - **kwargs + **kwargs: Any ) -> "_models.CheckNameAvailabilityResult": """Checks that the storage account name is valid and is not already in use. @@ -108,7 +108,7 @@ async def _create_initial( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountCreateParameters", - **kwargs + **kwargs: Any ) -> Optional["_models.StorageAccount"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.StorageAccount"]] error_map = { @@ -163,7 +163,7 @@ async def begin_create( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountCreateParameters", - **kwargs + **kwargs: Any ) -> AsyncLROPoller["_models.StorageAccount"]: """Asynchronously creates a new storage account with the specified parameters. If an account is already created and a subsequent create request is issued with different properties, the @@ -181,8 +181,8 @@ async def begin_create( :type parameters: ~azure.mgmt.storage.v2017_10_01.models.StorageAccountCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either StorageAccount or the result of cls(response) @@ -239,7 +239,7 @@ async def delete( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes a storage account in Microsoft Azure. @@ -295,7 +295,7 @@ async def get_properties( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.StorageAccount": """Returns the properties for the specified storage account including but not limited to name, SKU name, location, and account status. The ListKeys operation should be used to retrieve storage @@ -359,7 +359,7 @@ async def update( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountUpdateParameters", - **kwargs + **kwargs: Any ) -> "_models.StorageAccount": """The update operation can be used to update the SKU, encryption, access tier, or tags for a storage account. It can also be used to map the account to a custom domain. Only one custom @@ -432,7 +432,7 @@ async def update( def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageAccountListResult"]: """Lists all the storage accounts available under the subscription. Note that storage keys are not returned; use the ListKeys operation for this. @@ -500,7 +500,7 @@ async def get_next(next_link=None): def list_by_resource_group( self, resource_group_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageAccountListResult"]: """Lists all the storage accounts available under the given resource group. Note that storage keys are not returned; use the ListKeys operation for this. @@ -573,7 +573,7 @@ async def list_keys( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.StorageAccountListKeysResult": """Lists the access keys for the specified storage account. @@ -635,7 +635,7 @@ async def regenerate_key( resource_group_name: str, account_name: str, regenerate_key: "_models.StorageAccountRegenerateKeyParameters", - **kwargs + **kwargs: Any ) -> "_models.StorageAccountListKeysResult": """Regenerates one of the access keys for the specified storage account. @@ -704,7 +704,7 @@ async def list_account_sas( resource_group_name: str, account_name: str, parameters: "_models.AccountSasParameters", - **kwargs + **kwargs: Any ) -> "_models.ListAccountSasResponse": """List SAS credentials of a storage account. @@ -773,7 +773,7 @@ async def list_service_sas( resource_group_name: str, account_name: str, parameters: "_models.ServiceSasParameters", - **kwargs + **kwargs: Any ) -> "_models.ListServiceSasResponse": """List service SAS credentials of a specific resource. 
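The `IPRule` and `VirtualNetworkRule` hunks above (and the identical ones that follow for later API versions) turn `action` from a class-level constant into an optional constructor parameter that defaults to `None` and accepts only `None` or `"Allow"`. A short illustrative sketch against the v2017_06_01 namespace shown in the diff, with a hypothetical subnet resource ID:

```py
from azure.mgmt.storage.v2017_06_01.models import IPRule, VirtualNetworkRule

# `action` now defaults to None; previously it was the constant "Allow".
ip_rule = IPRule(ip_address_or_range="23.45.1.0/24")

vnet_rule = VirtualNetworkRule(
    virtual_network_resource_id=(
        "/subscriptions/<subscription-id>/resourceGroups/<group>/providers/"
        "Microsoft.Network/virtualNetworks/<vnet>/subnets/<subnet>"
    ),
    action="Allow",  # per the updated docstring, only None and "Allow" are accepted
)

assert ip_rule.action is None
assert vnet_rule.action == "Allow"
```

In practice this means callers who relied on the implicit `"Allow"` constant now need to pass it explicitly if they want it serialized in the request.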
diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_10_01/aio/operations/_usage_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_10_01/aio/operations/_usage_operations.py index bdc9f7d621b4..7675582d3696 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_10_01/aio/operations/_usage_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_10_01/aio/operations/_usage_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.UsageListResult"]: """Gets the current usage count and the limit for the resources under the subscription. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_10_01/models/_models.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_10_01/models/_models.py index ea4836c69ae8..247337f032db 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_10_01/models/_models.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_10_01/models/_models.py @@ -323,7 +323,7 @@ class Identity(msrest.serialization.Model): :vartype principal_id: str :ivar tenant_id: The tenant ID of resource. :vartype tenant_id: str - :ivar type: Required. The identity type. Default value: "SystemAssigned". + :ivar type: The identity type. Has constant value: "SystemAssigned". :vartype type: str """ @@ -353,20 +353,18 @@ def __init__( class IPRule(msrest.serialization.Model): """IP rule with specific IP or IP range in CIDR format. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param ip_address_or_range: Required. Specifies the IP or IP range in CIDR format. Only IPV4 address is allowed. :type ip_address_or_range: str - :ivar action: The action of IP ACL rule. Default value: "Allow". - :vartype action: str + :param action: The action of IP ACL rule. The only acceptable values to pass in are None and + "Allow". The default value is None. + :type action: str """ _validation = { 'ip_address_or_range': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -374,14 +372,13 @@ class IPRule(msrest.serialization.Model): 'action': {'key': 'action', 'type': 'str'}, } - action = "Allow" - def __init__( self, **kwargs ): super(IPRule, self).__init__(**kwargs) self.ip_address_or_range = kwargs['ip_address_or_range'] + self.action = kwargs.get('action', None) class KeyVaultProperties(msrest.serialization.Model): @@ -1073,7 +1070,7 @@ class StorageAccountCheckNameAvailabilityParameters(msrest.serialization.Model): :param name: Required. The storage account name. :type name: str - :ivar type: Required. The type of resource, Microsoft.Storage/storageAccounts. Default value: + :ivar type: The type of resource, Microsoft.Storage/storageAccounts. Has constant value: "Microsoft.Storage/storageAccounts". :vartype type: str """ @@ -1463,15 +1460,14 @@ def __init__( class VirtualNetworkRule(msrest.serialization.Model): """Virtual Network rule. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param virtual_network_resource_id: Required. Resource ID of a subnet, for example: /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}. 
:type virtual_network_resource_id: str - :ivar action: The action of virtual network rule. Default value: "Allow". - :vartype action: str + :param action: The action of virtual network rule. The only acceptable values to pass in are + None and "Allow". The default value is None. + :type action: str :param state: Gets the state of virtual network rule. Possible values include: "provisioning", "deprovisioning", "succeeded", "failed", "networkSourceDeleted". :type state: str or ~azure.mgmt.storage.v2017_10_01.models.State @@ -1479,7 +1475,6 @@ class VirtualNetworkRule(msrest.serialization.Model): _validation = { 'virtual_network_resource_id': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -1488,12 +1483,11 @@ class VirtualNetworkRule(msrest.serialization.Model): 'state': {'key': 'state', 'type': 'str'}, } - action = "Allow" - def __init__( self, **kwargs ): super(VirtualNetworkRule, self).__init__(**kwargs) self.virtual_network_resource_id = kwargs['virtual_network_resource_id'] + self.action = kwargs.get('action', None) self.state = kwargs.get('state', None) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_10_01/models/_models_py3.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_10_01/models/_models_py3.py index cfe831cc6baf..a7523b679d83 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_10_01/models/_models_py3.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_10_01/models/_models_py3.py @@ -352,7 +352,7 @@ class Identity(msrest.serialization.Model): :vartype principal_id: str :ivar tenant_id: The tenant ID of resource. :vartype tenant_id: str - :ivar type: Required. The identity type. Default value: "SystemAssigned". + :ivar type: The identity type. Has constant value: "SystemAssigned". :vartype type: str """ @@ -382,20 +382,18 @@ def __init__( class IPRule(msrest.serialization.Model): """IP rule with specific IP or IP range in CIDR format. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param ip_address_or_range: Required. Specifies the IP or IP range in CIDR format. Only IPV4 address is allowed. :type ip_address_or_range: str - :ivar action: The action of IP ACL rule. Default value: "Allow". - :vartype action: str + :param action: The action of IP ACL rule. The only acceptable values to pass in are None and + "Allow". The default value is None. + :type action: str """ _validation = { 'ip_address_or_range': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -403,16 +401,16 @@ class IPRule(msrest.serialization.Model): 'action': {'key': 'action', 'type': 'str'}, } - action = "Allow" - def __init__( self, *, ip_address_or_range: str, + action: Optional[str] = None, **kwargs ): super(IPRule, self).__init__(**kwargs) self.ip_address_or_range = ip_address_or_range + self.action = action class KeyVaultProperties(msrest.serialization.Model): @@ -1168,7 +1166,7 @@ class StorageAccountCheckNameAvailabilityParameters(msrest.serialization.Model): :param name: Required. The storage account name. :type name: str - :ivar type: Required. The type of resource, Microsoft.Storage/storageAccounts. Default value: + :ivar type: The type of resource, Microsoft.Storage/storageAccounts. Has constant value: "Microsoft.Storage/storageAccounts". :vartype type: str """ @@ -1585,15 +1583,14 @@ def __init__( class VirtualNetworkRule(msrest.serialization.Model): """Virtual Network rule. 
- Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param virtual_network_resource_id: Required. Resource ID of a subnet, for example: /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}. :type virtual_network_resource_id: str - :ivar action: The action of virtual network rule. Default value: "Allow". - :vartype action: str + :param action: The action of virtual network rule. The only acceptable values to pass in are + None and "Allow". The default value is None. + :type action: str :param state: Gets the state of virtual network rule. Possible values include: "provisioning", "deprovisioning", "succeeded", "failed", "networkSourceDeleted". :type state: str or ~azure.mgmt.storage.v2017_10_01.models.State @@ -1601,7 +1598,6 @@ class VirtualNetworkRule(msrest.serialization.Model): _validation = { 'virtual_network_resource_id': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -1610,15 +1606,15 @@ class VirtualNetworkRule(msrest.serialization.Model): 'state': {'key': 'state', 'type': 'str'}, } - action = "Allow" - def __init__( self, *, virtual_network_resource_id: str, + action: Optional[str] = None, state: Optional[Union[str, "State"]] = None, **kwargs ): super(VirtualNetworkRule, self).__init__(**kwargs) self.virtual_network_resource_id = virtual_network_resource_id + self.action = action self.state = state diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_10_01/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_10_01/operations/_storage_accounts_operations.py index 9cc1ae5756f3..62a52a1945c5 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_10_01/operations/_storage_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_10_01/operations/_storage_accounts_operations.py @@ -188,8 +188,8 @@ def begin_create( :type parameters: ~azure.mgmt.storage.v2017_10_01.models.StorageAccountCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the ARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either StorageAccount or the result of cls(response) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/_version.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/_version.py index 8dae91701610..232662316d4d 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/_version.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -VERSION = "18.0.0" +VERSION = "19.0.0" diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/aio/operations/_blob_containers_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/aio/operations/_blob_containers_operations.py index 1eb3e01971df..0745b0e44630 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/aio/operations/_blob_containers_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/aio/operations/_blob_containers_operations.py @@ -44,7 +44,7 @@ async def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.ListContainerItems": """Lists all containers and does not support a prefix like data plane. Also SRP today does not return continuation token. @@ -108,7 +108,7 @@ async def create( account_name: str, container_name: str, blob_container: "_models.BlobContainer", - **kwargs + **kwargs: Any ) -> "_models.BlobContainer": """Creates a new container under the specified account as described by request body. The container resource includes metadata and properties for that container. It does not include a list of the @@ -186,7 +186,7 @@ async def update( account_name: str, container_name: str, blob_container: "_models.BlobContainer", - **kwargs + **kwargs: Any ) -> "_models.BlobContainer": """Updates container properties as specified in request body. Properties not mentioned in the request will be unchanged. Update fails if the specified container doesn't already exist. @@ -262,7 +262,7 @@ async def get( resource_group_name: str, account_name: str, container_name: str, - **kwargs + **kwargs: Any ) -> "_models.BlobContainer": """Gets properties of a specified container. @@ -330,7 +330,7 @@ async def delete( resource_group_name: str, account_name: str, container_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes specified container under its account. @@ -394,7 +394,7 @@ async def set_legal_hold( account_name: str, container_name: str, legal_hold: "_models.LegalHold", - **kwargs + **kwargs: Any ) -> "_models.LegalHold": """Sets legal hold tags. Setting the same tag results in an idempotent operation. SetLegalHold follows an append pattern and does not clear out the existing tags that are not specified in @@ -472,7 +472,7 @@ async def clear_legal_hold( account_name: str, container_name: str, legal_hold: "_models.LegalHold", - **kwargs + **kwargs: Any ) -> "_models.LegalHold": """Clears legal hold tags. Clearing the same or non-existent tag results in an idempotent operation. ClearLegalHold clears out only the specified tags in the request. @@ -550,7 +550,7 @@ async def create_or_update_immutability_policy( container_name: str, if_match: Optional[str] = None, parameters: Optional["_models.ImmutabilityPolicy"] = None, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Creates or updates an unlocked immutability policy. ETag in If-Match is honored if given but not required for this operation. @@ -641,7 +641,7 @@ async def get_immutability_policy( account_name: str, container_name: str, if_match: Optional[str] = None, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Gets the existing immutability policy along with the corresponding ETag in response headers and body. 
@@ -721,7 +721,7 @@ async def delete_immutability_policy( account_name: str, container_name: str, if_match: str, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Aborts an unlocked immutability policy. The response of delete has immutabilityPeriodSinceCreationInDays set to 0. ETag in If-Match is required for this @@ -802,7 +802,7 @@ async def lock_immutability_policy( account_name: str, container_name: str, if_match: str, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Sets the ImmutabilityPolicy to Locked state. The only action allowed on a Locked policy is ExtendImmutabilityPolicy action. ETag in If-Match is required for this operation. @@ -880,7 +880,7 @@ async def extend_immutability_policy( container_name: str, if_match: str, parameters: Optional["_models.ImmutabilityPolicy"] = None, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Extends the immutabilityPeriodSinceCreationInDays of a locked immutabilityPolicy. The only action allowed on a Locked policy will be this action. ETag in If-Match is required for this @@ -969,7 +969,7 @@ async def lease( account_name: str, container_name: str, parameters: Optional["_models.LeaseContainerRequest"] = None, - **kwargs + **kwargs: Any ) -> "_models.LeaseContainerResponse": """The Lease Container operation establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/aio/operations/_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/aio/operations/_operations.py index 8f032f550b45..afacadb61fae 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/aio/operations/_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/aio/operations/_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.OperationListResult"]: """Lists all of the available Storage Rest API operations. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/aio/operations/_skus_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/aio/operations/_skus_operations.py index c962a2c29279..a8133bca91dd 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/aio/operations/_skus_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/aio/operations/_skus_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageSkuListResult"]: """Lists the available SKUs supported by Microsoft.Storage for given subscription. 
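The recurring docstring rewording from "Required. ... Default value:" to "Has constant value:" in these model hunks reflects that the affected `type` attributes are class-level constants populated by the model itself, not values a caller supplies. A small sketch against the v2017_10_01 models (whose hunks appear above), just to show where the constants surface:

```py
from azure.mgmt.storage.v2017_10_01.models import (
    Identity,
    StorageAccountCheckNameAvailabilityParameters,
)

# `type` is populated automatically on these models; callers never pass it in.
identity = Identity()
params = StorageAccountCheckNameAvailabilityParameters(name="mystorageaccount")

print(identity.type)  # "SystemAssigned"
print(params.type)    # "Microsoft.Storage/storageAccounts"
```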
diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/aio/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/aio/operations/_storage_accounts_operations.py index 32565fe7ff87..0842a3a0d05c 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/aio/operations/_storage_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/aio/operations/_storage_accounts_operations.py @@ -46,7 +46,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: async def check_name_availability( self, account_name: "_models.StorageAccountCheckNameAvailabilityParameters", - **kwargs + **kwargs: Any ) -> "_models.CheckNameAvailabilityResult": """Checks that the storage account name is valid and is not already in use. @@ -108,7 +108,7 @@ async def _create_initial( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountCreateParameters", - **kwargs + **kwargs: Any ) -> Optional["_models.StorageAccount"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.StorageAccount"]] error_map = { @@ -163,7 +163,7 @@ async def begin_create( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountCreateParameters", - **kwargs + **kwargs: Any ) -> AsyncLROPoller["_models.StorageAccount"]: """Asynchronously creates a new storage account with the specified parameters. If an account is already created and a subsequent create request is issued with different properties, the @@ -181,8 +181,8 @@ async def begin_create( :type parameters: ~azure.mgmt.storage.v2018_02_01.models.StorageAccountCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either StorageAccount or the result of cls(response) @@ -239,7 +239,7 @@ async def delete( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes a storage account in Microsoft Azure. @@ -295,7 +295,7 @@ async def get_properties( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.StorageAccount": """Returns the properties for the specified storage account including but not limited to name, SKU name, location, and account status. The ListKeys operation should be used to retrieve storage @@ -359,7 +359,7 @@ async def update( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountUpdateParameters", - **kwargs + **kwargs: Any ) -> "_models.StorageAccount": """The update operation can be used to update the SKU, encryption, access tier, or tags for a storage account. It can also be used to map the account to a custom domain. 
Only one custom @@ -432,7 +432,7 @@ async def update( def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageAccountListResult"]: """Lists all the storage accounts available under the subscription. Note that storage keys are not returned; use the ListKeys operation for this. @@ -500,7 +500,7 @@ async def get_next(next_link=None): def list_by_resource_group( self, resource_group_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageAccountListResult"]: """Lists all the storage accounts available under the given resource group. Note that storage keys are not returned; use the ListKeys operation for this. @@ -573,7 +573,7 @@ async def list_keys( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.StorageAccountListKeysResult": """Lists the access keys for the specified storage account. @@ -635,7 +635,7 @@ async def regenerate_key( resource_group_name: str, account_name: str, regenerate_key: "_models.StorageAccountRegenerateKeyParameters", - **kwargs + **kwargs: Any ) -> "_models.StorageAccountListKeysResult": """Regenerates one of the access keys for the specified storage account. @@ -704,7 +704,7 @@ async def list_account_sas( resource_group_name: str, account_name: str, parameters: "_models.AccountSasParameters", - **kwargs + **kwargs: Any ) -> "_models.ListAccountSasResponse": """List SAS credentials of a storage account. @@ -773,7 +773,7 @@ async def list_service_sas( resource_group_name: str, account_name: str, parameters: "_models.ServiceSasParameters", - **kwargs + **kwargs: Any ) -> "_models.ListServiceSasResponse": """List service SAS credentials of a specific resource. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/aio/operations/_usage_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/aio/operations/_usage_operations.py index 8c77fa0feeff..d04e257ee6de 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/aio/operations/_usage_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/aio/operations/_usage_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.UsageListResult"]: """Gets the current usage count and the limit for the resources under the subscription. @@ -110,7 +110,7 @@ async def get_next(next_link=None): def list_by_location( self, location: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.UsageListResult"]: """Gets the current usage count and the limit for the resources of the location under the subscription. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/models/_models.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/models/_models.py index d9c903663426..8876aeaf9988 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/models/_models.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/models/_models.py @@ -505,7 +505,7 @@ class Identity(msrest.serialization.Model): :vartype principal_id: str :ivar tenant_id: The tenant ID of resource. :vartype tenant_id: str - :ivar type: Required. The identity type. Default value: "SystemAssigned". + :ivar type: The identity type. Has constant value: "SystemAssigned". :vartype type: str """ @@ -628,20 +628,18 @@ def __init__( class IPRule(msrest.serialization.Model): """IP rule with specific IP or IP range in CIDR format. 
- Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param ip_address_or_range: Required. Specifies the IP or IP range in CIDR format. Only IPV4 address is allowed. :type ip_address_or_range: str - :ivar action: The action of IP ACL rule. Default value: "Allow". - :vartype action: str + :param action: The action of IP ACL rule. The only acceptable values to pass in are None and + "Allow". The default value is None. + :type action: str """ _validation = { 'ip_address_or_range': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -649,14 +647,13 @@ class IPRule(msrest.serialization.Model): 'action': {'key': 'action', 'type': 'str'}, } - action = "Allow" - def __init__( self, **kwargs ): super(IPRule, self).__init__(**kwargs) self.ip_address_or_range = kwargs['ip_address_or_range'] + self.action = kwargs.get('action', None) class KeyVaultProperties(msrest.serialization.Model): @@ -1611,7 +1608,7 @@ class StorageAccountCheckNameAvailabilityParameters(msrest.serialization.Model): :param name: Required. The storage account name. :type name: str - :ivar type: Required. The type of resource, Microsoft.Storage/storageAccounts. Default value: + :ivar type: The type of resource, Microsoft.Storage/storageAccounts. Has constant value: "Microsoft.Storage/storageAccounts". :vartype type: str """ @@ -2103,15 +2100,14 @@ def __init__( class VirtualNetworkRule(msrest.serialization.Model): """Virtual Network rule. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param virtual_network_resource_id: Required. Resource ID of a subnet, for example: /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}. :type virtual_network_resource_id: str - :ivar action: The action of virtual network rule. Default value: "Allow". - :vartype action: str + :param action: The action of virtual network rule. The only acceptable values to pass in are + None and "Allow". The default value is None. + :type action: str :param state: Gets the state of virtual network rule. Possible values include: "provisioning", "deprovisioning", "succeeded", "failed", "networkSourceDeleted". 
:type state: str or ~azure.mgmt.storage.v2018_02_01.models.State @@ -2119,7 +2115,6 @@ class VirtualNetworkRule(msrest.serialization.Model): _validation = { 'virtual_network_resource_id': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -2128,12 +2123,11 @@ class VirtualNetworkRule(msrest.serialization.Model): 'state': {'key': 'state', 'type': 'str'}, } - action = "Allow" - def __init__( self, **kwargs ): super(VirtualNetworkRule, self).__init__(**kwargs) self.virtual_network_resource_id = kwargs['virtual_network_resource_id'] + self.action = kwargs.get('action', None) self.state = kwargs.get('state', None) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/models/_models_py3.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/models/_models_py3.py index bfd646d618d2..479bf8bb2d31 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/models/_models_py3.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/models/_models_py3.py @@ -537,7 +537,7 @@ class Identity(msrest.serialization.Model): :vartype principal_id: str :ivar tenant_id: The tenant ID of resource. :vartype tenant_id: str - :ivar type: Required. The identity type. Default value: "SystemAssigned". + :ivar type: The identity type. Has constant value: "SystemAssigned". :vartype type: str """ @@ -664,20 +664,18 @@ def __init__( class IPRule(msrest.serialization.Model): """IP rule with specific IP or IP range in CIDR format. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param ip_address_or_range: Required. Specifies the IP or IP range in CIDR format. Only IPV4 address is allowed. :type ip_address_or_range: str - :ivar action: The action of IP ACL rule. Default value: "Allow". - :vartype action: str + :param action: The action of IP ACL rule. The only acceptable values to pass in are None and + "Allow". The default value is None. + :type action: str """ _validation = { 'ip_address_or_range': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -685,16 +683,16 @@ class IPRule(msrest.serialization.Model): 'action': {'key': 'action', 'type': 'str'}, } - action = "Allow" - def __init__( self, *, ip_address_or_range: str, + action: Optional[str] = None, **kwargs ): super(IPRule, self).__init__(**kwargs) self.ip_address_or_range = ip_address_or_range + self.action = action class KeyVaultProperties(msrest.serialization.Model): @@ -1733,7 +1731,7 @@ class StorageAccountCheckNameAvailabilityParameters(msrest.serialization.Model): :param name: Required. The storage account name. :type name: str - :ivar type: Required. The type of resource, Microsoft.Storage/storageAccounts. Default value: + :ivar type: The type of resource, Microsoft.Storage/storageAccounts. Has constant value: "Microsoft.Storage/storageAccounts". :vartype type: str """ @@ -2253,15 +2251,14 @@ def __init__( class VirtualNetworkRule(msrest.serialization.Model): """Virtual Network rule. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param virtual_network_resource_id: Required. Resource ID of a subnet, for example: /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}. 
:type virtual_network_resource_id: str - :ivar action: The action of virtual network rule. Default value: "Allow". - :vartype action: str + :param action: The action of virtual network rule. The only acceptable values to pass in are + None and "Allow". The default value is None. + :type action: str :param state: Gets the state of virtual network rule. Possible values include: "provisioning", "deprovisioning", "succeeded", "failed", "networkSourceDeleted". :type state: str or ~azure.mgmt.storage.v2018_02_01.models.State @@ -2269,7 +2266,6 @@ class VirtualNetworkRule(msrest.serialization.Model): _validation = { 'virtual_network_resource_id': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -2278,15 +2274,15 @@ class VirtualNetworkRule(msrest.serialization.Model): 'state': {'key': 'state', 'type': 'str'}, } - action = "Allow" - def __init__( self, *, virtual_network_resource_id: str, + action: Optional[str] = None, state: Optional[Union[str, "State"]] = None, **kwargs ): super(VirtualNetworkRule, self).__init__(**kwargs) self.virtual_network_resource_id = virtual_network_resource_id + self.action = action self.state = state diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/operations/_storage_accounts_operations.py index 51b718236878..8607e3345db0 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/operations/_storage_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_02_01/operations/_storage_accounts_operations.py @@ -188,8 +188,8 @@ def begin_create( :type parameters: ~azure.mgmt.storage.v2018_02_01.models.StorageAccountCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the ARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either StorageAccount or the result of cls(response) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/_version.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/_version.py index 8dae91701610..232662316d4d 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/_version.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -VERSION = "18.0.0" +VERSION = "19.0.0" diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/aio/operations/_blob_containers_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/aio/operations/_blob_containers_operations.py index 111241aa098e..cbbc78a23b4d 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/aio/operations/_blob_containers_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/aio/operations/_blob_containers_operations.py @@ -44,7 +44,7 @@ async def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.ListContainerItems": """Lists all containers and does not support a prefix like data plane. Also SRP today does not return continuation token. @@ -108,7 +108,7 @@ async def create( account_name: str, container_name: str, blob_container: "_models.BlobContainer", - **kwargs + **kwargs: Any ) -> "_models.BlobContainer": """Creates a new container under the specified account as described by request body. The container resource includes metadata and properties for that container. It does not include a list of the @@ -186,7 +186,7 @@ async def update( account_name: str, container_name: str, blob_container: "_models.BlobContainer", - **kwargs + **kwargs: Any ) -> "_models.BlobContainer": """Updates container properties as specified in request body. Properties not mentioned in the request will be unchanged. Update fails if the specified container doesn't already exist. @@ -262,7 +262,7 @@ async def get( resource_group_name: str, account_name: str, container_name: str, - **kwargs + **kwargs: Any ) -> "_models.BlobContainer": """Gets properties of a specified container. @@ -330,7 +330,7 @@ async def delete( resource_group_name: str, account_name: str, container_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes specified container under its account. @@ -394,7 +394,7 @@ async def set_legal_hold( account_name: str, container_name: str, legal_hold: "_models.LegalHold", - **kwargs + **kwargs: Any ) -> "_models.LegalHold": """Sets legal hold tags. Setting the same tag results in an idempotent operation. SetLegalHold follows an append pattern and does not clear out the existing tags that are not specified in @@ -472,7 +472,7 @@ async def clear_legal_hold( account_name: str, container_name: str, legal_hold: "_models.LegalHold", - **kwargs + **kwargs: Any ) -> "_models.LegalHold": """Clears legal hold tags. Clearing the same or non-existent tag results in an idempotent operation. ClearLegalHold clears out only the specified tags in the request. @@ -550,7 +550,7 @@ async def create_or_update_immutability_policy( container_name: str, if_match: Optional[str] = None, parameters: Optional["_models.ImmutabilityPolicy"] = None, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Creates or updates an unlocked immutability policy. ETag in If-Match is honored if given but not required for this operation. @@ -641,7 +641,7 @@ async def get_immutability_policy( account_name: str, container_name: str, if_match: Optional[str] = None, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Gets the existing immutability policy along with the corresponding ETag in response headers and body. 
@@ -721,7 +721,7 @@ async def delete_immutability_policy( account_name: str, container_name: str, if_match: str, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Aborts an unlocked immutability policy. The response of delete has immutabilityPeriodSinceCreationInDays set to 0. ETag in If-Match is required for this @@ -802,7 +802,7 @@ async def lock_immutability_policy( account_name: str, container_name: str, if_match: str, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Sets the ImmutabilityPolicy to Locked state. The only action allowed on a Locked policy is ExtendImmutabilityPolicy action. ETag in If-Match is required for this operation. @@ -880,7 +880,7 @@ async def extend_immutability_policy( container_name: str, if_match: str, parameters: Optional["_models.ImmutabilityPolicy"] = None, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Extends the immutabilityPeriodSinceCreationInDays of a locked immutabilityPolicy. The only action allowed on a Locked policy will be this action. ETag in If-Match is required for this @@ -969,7 +969,7 @@ async def lease( account_name: str, container_name: str, parameters: Optional["_models.LeaseContainerRequest"] = None, - **kwargs + **kwargs: Any ) -> "_models.LeaseContainerResponse": """The Lease Container operation establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/aio/operations/_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/aio/operations/_operations.py index 8acaf370503e..f16fcc74108e 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/aio/operations/_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/aio/operations/_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.OperationListResult"]: """Lists all of the available Storage Rest API operations. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/aio/operations/_skus_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/aio/operations/_skus_operations.py index a253f1b1bae8..78e9436d07d3 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/aio/operations/_skus_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/aio/operations/_skus_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageSkuListResult"]: """Lists the available SKUs supported by Microsoft.Storage for given subscription. 
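The `**kwargs: Any` annotations added throughout these async operations are type hints only; call sites are unchanged. A hedged sketch of invoking one of the async operations whose signature appears in these hunks, with placeholder subscription and account names:

```py
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.storage.aio import StorageManagementClient
from azure.mgmt.storage.models import StorageAccountCheckNameAvailabilityParameters


async def main() -> None:
    # Placeholder subscription ID and account name -- substitute your own.
    async with DefaultAzureCredential() as credential:
        async with StorageManagementClient(credential, "<subscription-id>") as client:
            result = await client.storage_accounts.check_name_availability(
                StorageAccountCheckNameAvailabilityParameters(name="mystorageaccount")
            )
            print(result.name_available, result.reason, result.message)


asyncio.run(main())
```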
diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/aio/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/aio/operations/_storage_accounts_operations.py index 96edd77d655b..798a72a41dd5 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/aio/operations/_storage_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/aio/operations/_storage_accounts_operations.py @@ -46,7 +46,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: async def check_name_availability( self, account_name: "_models.StorageAccountCheckNameAvailabilityParameters", - **kwargs + **kwargs: Any ) -> "_models.CheckNameAvailabilityResult": """Checks that the storage account name is valid and is not already in use. @@ -108,7 +108,7 @@ async def _create_initial( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountCreateParameters", - **kwargs + **kwargs: Any ) -> Optional["_models.StorageAccount"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.StorageAccount"]] error_map = { @@ -163,7 +163,7 @@ async def begin_create( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountCreateParameters", - **kwargs + **kwargs: Any ) -> AsyncLROPoller["_models.StorageAccount"]: """Asynchronously creates a new storage account with the specified parameters. If an account is already created and a subsequent create request is issued with different properties, the @@ -181,8 +181,8 @@ async def begin_create( :type parameters: ~azure.mgmt.storage.v2018_03_01_preview.models.StorageAccountCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either StorageAccount or the result of cls(response) @@ -239,7 +239,7 @@ async def delete( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes a storage account in Microsoft Azure. @@ -295,7 +295,7 @@ async def get_properties( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.StorageAccount": """Returns the properties for the specified storage account including but not limited to name, SKU name, location, and account status. The ListKeys operation should be used to retrieve storage @@ -359,7 +359,7 @@ async def update( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountUpdateParameters", - **kwargs + **kwargs: Any ) -> "_models.StorageAccount": """The update operation can be used to update the SKU, encryption, access tier, or tags for a storage account. It can also be used to map the account to a custom domain. 
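The reworded `polling` keyword above describes how the long-running create call behaves. A minimal sketch, assuming a placeholder subscription ID and resource names, of driving `begin_create` with polling disabled; the sync client documents the same keyword with `ARMPolling` in place of `AsyncARMPolling`:

```py
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.storage.aio import StorageManagementClient
from azure.mgmt.storage.models import Sku, StorageAccountCreateParameters


async def main() -> None:
    params = StorageAccountCreateParameters(
        sku=Sku(name="Standard_LRS"), kind="StorageV2", location="eastus"
    )
    async with DefaultAzureCredential() as credential:
        async with StorageManagementClient(credential, "<subscription-id>") as client:
            # polling=False replaces the default AsyncARMPolling method with no polling
            # at all, so the poller hands back the service's first response instead of
            # waiting for provisioning; pass an initialized polling object here instead
            # to use a custom strategy.
            poller = await client.storage_accounts.begin_create(
                "my-rg", "mystorageacct", params, polling=False
            )
            initial = await poller.result()
            print(initial)


asyncio.run(main())
```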
Only one custom @@ -432,7 +432,7 @@ async def update( def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageAccountListResult"]: """Lists all the storage accounts available under the subscription. Note that storage keys are not returned; use the ListKeys operation for this. @@ -500,7 +500,7 @@ async def get_next(next_link=None): def list_by_resource_group( self, resource_group_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageAccountListResult"]: """Lists all the storage accounts available under the given resource group. Note that storage keys are not returned; use the ListKeys operation for this. @@ -573,7 +573,7 @@ async def list_keys( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.StorageAccountListKeysResult": """Lists the access keys for the specified storage account. @@ -635,7 +635,7 @@ async def regenerate_key( resource_group_name: str, account_name: str, regenerate_key: "_models.StorageAccountRegenerateKeyParameters", - **kwargs + **kwargs: Any ) -> "_models.StorageAccountListKeysResult": """Regenerates one of the access keys for the specified storage account. @@ -704,7 +704,7 @@ async def list_account_sas( resource_group_name: str, account_name: str, parameters: "_models.AccountSasParameters", - **kwargs + **kwargs: Any ) -> "_models.ListAccountSasResponse": """List SAS credentials of a storage account. @@ -773,7 +773,7 @@ async def list_service_sas( resource_group_name: str, account_name: str, parameters: "_models.ServiceSasParameters", - **kwargs + **kwargs: Any ) -> "_models.ListServiceSasResponse": """List service SAS credentials of a specific resource. @@ -842,7 +842,7 @@ async def get_management_policies( resource_group_name: str, account_name: str, management_policy_name: Union[str, "_models.ManagementPolicyName"], - **kwargs + **kwargs: Any ) -> "_models.StorageAccountManagementPolicies": """Gets the data policy rules associated with the specified storage account. @@ -909,7 +909,7 @@ async def create_or_update_management_policies( account_name: str, management_policy_name: Union[str, "_models.ManagementPolicyName"], properties: "_models.ManagementPoliciesRulesSetParameter", - **kwargs + **kwargs: Any ) -> "_models.StorageAccountManagementPolicies": """Sets the data policy rules associated with the specified storage account. @@ -982,7 +982,7 @@ async def delete_management_policies( resource_group_name: str, account_name: str, management_policy_name: Union[str, "_models.ManagementPolicyName"], - **kwargs + **kwargs: Any ) -> None: """Deletes the data policy rules associated with the specified storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/aio/operations/_usages_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/aio/operations/_usages_operations.py index 6b1f688f21cc..af7951014e4d 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/aio/operations/_usages_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/aio/operations/_usages_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.UsageListResult"]: """Gets the current usage count and the limit for the resources under the subscription. 
@@ -110,7 +110,7 @@ async def get_next(next_link=None): def list_by_location( self, location: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.UsageListResult"]: """Gets the current usage count and the limit for the resources of the location under the subscription. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/models/_models.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/models/_models.py index 21ae67fbf49b..3317718d039c 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/models/_models.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/models/_models.py @@ -505,7 +505,7 @@ class Identity(msrest.serialization.Model): :vartype principal_id: str :ivar tenant_id: The tenant ID of resource. :vartype tenant_id: str - :ivar type: Required. The identity type. Default value: "SystemAssigned". + :ivar type: The identity type. Has constant value: "SystemAssigned". :vartype type: str """ @@ -629,20 +629,18 @@ def __init__( class IPRule(msrest.serialization.Model): """IP rule with specific IP or IP range in CIDR format. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param ip_address_or_range: Required. Specifies the IP or IP range in CIDR format. Only IPV4 address is allowed. :type ip_address_or_range: str - :ivar action: The action of IP ACL rule. Default value: "Allow". - :vartype action: str + :param action: The action of IP ACL rule. The only acceptable values to pass in are None and + "Allow". The default value is None. + :type action: str """ _validation = { 'ip_address_or_range': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -650,14 +648,13 @@ class IPRule(msrest.serialization.Model): 'action': {'key': 'action', 'type': 'str'}, } - action = "Allow" - def __init__( self, **kwargs ): super(IPRule, self).__init__(**kwargs) self.ip_address_or_range = kwargs['ip_address_or_range'] + self.action = kwargs.get('action', None) class KeyVaultProperties(msrest.serialization.Model): @@ -994,11 +991,11 @@ class ManagementPoliciesRules(msrest.serialization.Model): :param policy: The Storage Account ManagementPolicies Rules, in JSON format. See more details in: https://docs.microsoft.com/en-us/azure/storage/common/storage-lifecycle-managment-concepts. - :type policy: str + :type policy: any """ _attribute_map = { - 'policy': {'key': 'policy', 'type': 'str'}, + 'policy': {'key': 'policy', 'type': 'object'}, } def __init__( @@ -1014,11 +1011,11 @@ class ManagementPoliciesRulesSetParameter(msrest.serialization.Model): :param policy: The Storage Account ManagementPolicies Rules, in JSON format. See more details in: https://docs.microsoft.com/en-us/azure/storage/common/storage-lifecycle-managment-concepts. - :type policy: str + :type policy: any """ _attribute_map = { - 'policy': {'key': 'properties.policy', 'type': 'str'}, + 'policy': {'key': 'properties.policy', 'type': 'object'}, } def __init__( @@ -1657,7 +1654,7 @@ class StorageAccountCheckNameAvailabilityParameters(msrest.serialization.Model): :param name: Required. The storage account name. :type name: str - :ivar type: Required. The type of resource, Microsoft.Storage/storageAccounts. Default value: + :ivar type: The type of resource, Microsoft.Storage/storageAccounts. Has constant value: "Microsoft.Storage/storageAccounts". 
:vartype type: str """ @@ -1862,7 +1859,7 @@ class StorageAccountManagementPolicies(Resource): :vartype type: str :param policy: The Storage Account ManagementPolicies Rules, in JSON format. See more details in: https://docs.microsoft.com/en-us/azure/storage/common/storage-lifecycle-managment-concepts. - :type policy: str + :type policy: any :ivar last_modified_time: Returns the date and time the ManagementPolicies was last modified. :vartype last_modified_time: ~datetime.datetime """ @@ -1878,7 +1875,7 @@ class StorageAccountManagementPolicies(Resource): 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'policy': {'key': 'properties.policy', 'type': 'str'}, + 'policy': {'key': 'properties.policy', 'type': 'object'}, 'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'iso-8601'}, } @@ -1898,7 +1895,7 @@ class StorageAccountManagementPoliciesRulesProperty(ManagementPoliciesRules): :param policy: The Storage Account ManagementPolicies Rules, in JSON format. See more details in: https://docs.microsoft.com/en-us/azure/storage/common/storage-lifecycle-managment-concepts. - :type policy: str + :type policy: any :ivar last_modified_time: Returns the date and time the ManagementPolicies was last modified. :vartype last_modified_time: ~datetime.datetime """ @@ -1908,7 +1905,7 @@ class StorageAccountManagementPoliciesRulesProperty(ManagementPoliciesRules): } _attribute_map = { - 'policy': {'key': 'policy', 'type': 'str'}, + 'policy': {'key': 'policy', 'type': 'object'}, 'last_modified_time': {'key': 'lastModifiedTime', 'type': 'iso-8601'}, } @@ -2223,15 +2220,14 @@ def __init__( class VirtualNetworkRule(msrest.serialization.Model): """Virtual Network rule. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param virtual_network_resource_id: Required. Resource ID of a subnet, for example: /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}. :type virtual_network_resource_id: str - :ivar action: The action of virtual network rule. Default value: "Allow". - :vartype action: str + :param action: The action of virtual network rule. The only acceptable values to pass in are + None and "Allow". The default value is None. + :type action: str :param state: Gets the state of virtual network rule. Possible values include: "provisioning", "deprovisioning", "succeeded", "failed", "networkSourceDeleted". 
:type state: str or ~azure.mgmt.storage.v2018_03_01_preview.models.State @@ -2239,7 +2235,6 @@ class VirtualNetworkRule(msrest.serialization.Model): _validation = { 'virtual_network_resource_id': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -2248,12 +2243,11 @@ class VirtualNetworkRule(msrest.serialization.Model): 'state': {'key': 'state', 'type': 'str'}, } - action = "Allow" - def __init__( self, **kwargs ): super(VirtualNetworkRule, self).__init__(**kwargs) self.virtual_network_resource_id = kwargs['virtual_network_resource_id'] + self.action = kwargs.get('action', None) self.state = kwargs.get('state', None) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/models/_models_py3.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/models/_models_py3.py index 57268137f721..d0bed1a06631 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/models/_models_py3.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/models/_models_py3.py @@ -7,7 +7,7 @@ # -------------------------------------------------------------------------- import datetime -from typing import Dict, List, Optional, Union +from typing import Any, Dict, List, Optional, Union import msrest.serialization @@ -537,7 +537,7 @@ class Identity(msrest.serialization.Model): :vartype principal_id: str :ivar tenant_id: The tenant ID of resource. :vartype tenant_id: str - :ivar type: Required. The identity type. Default value: "SystemAssigned". + :ivar type: The identity type. Has constant value: "SystemAssigned". :vartype type: str """ @@ -665,20 +665,18 @@ def __init__( class IPRule(msrest.serialization.Model): """IP rule with specific IP or IP range in CIDR format. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param ip_address_or_range: Required. Specifies the IP or IP range in CIDR format. Only IPV4 address is allowed. :type ip_address_or_range: str - :ivar action: The action of IP ACL rule. Default value: "Allow". - :vartype action: str + :param action: The action of IP ACL rule. The only acceptable values to pass in are None and + "Allow". The default value is None. + :type action: str """ _validation = { 'ip_address_or_range': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -686,16 +684,16 @@ class IPRule(msrest.serialization.Model): 'action': {'key': 'action', 'type': 'str'}, } - action = "Allow" - def __init__( self, *, ip_address_or_range: str, + action: Optional[str] = None, **kwargs ): super(IPRule, self).__init__(**kwargs) self.ip_address_or_range = ip_address_or_range + self.action = action class KeyVaultProperties(msrest.serialization.Model): @@ -1054,17 +1052,17 @@ class ManagementPoliciesRules(msrest.serialization.Model): :param policy: The Storage Account ManagementPolicies Rules, in JSON format. See more details in: https://docs.microsoft.com/en-us/azure/storage/common/storage-lifecycle-managment-concepts. 
- :type policy: str + :type policy: any """ _attribute_map = { - 'policy': {'key': 'policy', 'type': 'str'}, + 'policy': {'key': 'policy', 'type': 'object'}, } def __init__( self, *, - policy: Optional[str] = None, + policy: Optional[Any] = None, **kwargs ): super(ManagementPoliciesRules, self).__init__(**kwargs) @@ -1076,17 +1074,17 @@ class ManagementPoliciesRulesSetParameter(msrest.serialization.Model): :param policy: The Storage Account ManagementPolicies Rules, in JSON format. See more details in: https://docs.microsoft.com/en-us/azure/storage/common/storage-lifecycle-managment-concepts. - :type policy: str + :type policy: any """ _attribute_map = { - 'policy': {'key': 'properties.policy', 'type': 'str'}, + 'policy': {'key': 'properties.policy', 'type': 'object'}, } def __init__( self, *, - policy: Optional[str] = None, + policy: Optional[Any] = None, **kwargs ): super(ManagementPoliciesRulesSetParameter, self).__init__(**kwargs) @@ -1783,7 +1781,7 @@ class StorageAccountCheckNameAvailabilityParameters(msrest.serialization.Model): :param name: Required. The storage account name. :type name: str - :ivar type: Required. The type of resource, Microsoft.Storage/storageAccounts. Default value: + :ivar type: The type of resource, Microsoft.Storage/storageAccounts. Has constant value: "Microsoft.Storage/storageAccounts". :vartype type: str """ @@ -2002,7 +2000,7 @@ class StorageAccountManagementPolicies(Resource): :vartype type: str :param policy: The Storage Account ManagementPolicies Rules, in JSON format. See more details in: https://docs.microsoft.com/en-us/azure/storage/common/storage-lifecycle-managment-concepts. - :type policy: str + :type policy: any :ivar last_modified_time: Returns the date and time the ManagementPolicies was last modified. :vartype last_modified_time: ~datetime.datetime """ @@ -2018,14 +2016,14 @@ class StorageAccountManagementPolicies(Resource): 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'policy': {'key': 'properties.policy', 'type': 'str'}, + 'policy': {'key': 'properties.policy', 'type': 'object'}, 'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'iso-8601'}, } def __init__( self, *, - policy: Optional[str] = None, + policy: Optional[Any] = None, **kwargs ): super(StorageAccountManagementPolicies, self).__init__(**kwargs) @@ -2040,7 +2038,7 @@ class StorageAccountManagementPoliciesRulesProperty(ManagementPoliciesRules): :param policy: The Storage Account ManagementPolicies Rules, in JSON format. See more details in: https://docs.microsoft.com/en-us/azure/storage/common/storage-lifecycle-managment-concepts. - :type policy: str + :type policy: any :ivar last_modified_time: Returns the date and time the ManagementPolicies was last modified. :vartype last_modified_time: ~datetime.datetime """ @@ -2050,14 +2048,14 @@ class StorageAccountManagementPoliciesRulesProperty(ManagementPoliciesRules): } _attribute_map = { - 'policy': {'key': 'policy', 'type': 'str'}, + 'policy': {'key': 'policy', 'type': 'object'}, 'last_modified_time': {'key': 'lastModifiedTime', 'type': 'iso-8601'}, } def __init__( self, *, - policy: Optional[str] = None, + policy: Optional[Any] = None, **kwargs ): super(StorageAccountManagementPoliciesRulesProperty, self).__init__(policy=policy, **kwargs) @@ -2381,15 +2379,14 @@ def __init__( class VirtualNetworkRule(msrest.serialization.Model): """Virtual Network rule. - Variables are only populated by the server, and will be ignored when sending a request. 
- All required parameters must be populated in order to send to Azure. :param virtual_network_resource_id: Required. Resource ID of a subnet, for example: /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}. :type virtual_network_resource_id: str - :ivar action: The action of virtual network rule. Default value: "Allow". - :vartype action: str + :param action: The action of virtual network rule. The only acceptable values to pass in are + None and "Allow". The default value is None. + :type action: str :param state: Gets the state of virtual network rule. Possible values include: "provisioning", "deprovisioning", "succeeded", "failed", "networkSourceDeleted". :type state: str or ~azure.mgmt.storage.v2018_03_01_preview.models.State @@ -2397,7 +2394,6 @@ class VirtualNetworkRule(msrest.serialization.Model): _validation = { 'virtual_network_resource_id': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -2406,15 +2402,15 @@ class VirtualNetworkRule(msrest.serialization.Model): 'state': {'key': 'state', 'type': 'str'}, } - action = "Allow" - def __init__( self, *, virtual_network_resource_id: str, + action: Optional[str] = None, state: Optional[Union[str, "State"]] = None, **kwargs ): super(VirtualNetworkRule, self).__init__(**kwargs) self.virtual_network_resource_id = virtual_network_resource_id + self.action = action self.state = state diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/operations/_storage_accounts_operations.py index 9bcd6256d306..37821a6431a6 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/operations/_storage_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/operations/_storage_accounts_operations.py @@ -188,8 +188,8 @@ def begin_create( :type parameters: ~azure.mgmt.storage.v2018_03_01_preview.models.StorageAccountCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the ARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either StorageAccount or the result of cls(response) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/_version.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/_version.py index 8dae91701610..232662316d4d 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/_version.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
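With the `constant` validation on `action` removed above, both network-rule models now take `action` as an ordinary optional keyword. A small sketch against the versioned models touched in this hunk (the subnet resource ID is a placeholder):

```py
from azure.mgmt.storage.v2018_03_01_preview.models import IPRule, VirtualNetworkRule

# `action` is no longer a class-level constant; it defaults to None, and the only
# other accepted value is "Allow", as the regenerated docstrings state.
ip_rule = IPRule(ip_address_or_range="203.0.113.0/24", action="Allow")

vnet_rule = VirtualNetworkRule(
    virtual_network_resource_id=(
        "/subscriptions/<subscription-id>/resourceGroups/<rg>/providers/"
        "Microsoft.Network/virtualNetworks/<vnet>/subnets/<subnet>"
    ),
    # action left at its default of None; "Allow" would also be accepted.
)

print(ip_rule.action, vnet_rule.action)
```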
# -------------------------------------------------------------------------- -VERSION = "18.0.0" +VERSION = "19.0.0" diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/aio/operations/_blob_containers_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/aio/operations/_blob_containers_operations.py index f724311b8aa2..fa446262595e 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/aio/operations/_blob_containers_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/aio/operations/_blob_containers_operations.py @@ -44,7 +44,7 @@ async def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.ListContainerItems": """Lists all containers and does not support a prefix like data plane. Also SRP today does not return continuation token. @@ -108,7 +108,7 @@ async def create( account_name: str, container_name: str, blob_container: "_models.BlobContainer", - **kwargs + **kwargs: Any ) -> "_models.BlobContainer": """Creates a new container under the specified account as described by request body. The container resource includes metadata and properties for that container. It does not include a list of the @@ -186,7 +186,7 @@ async def update( account_name: str, container_name: str, blob_container: "_models.BlobContainer", - **kwargs + **kwargs: Any ) -> "_models.BlobContainer": """Updates container properties as specified in request body. Properties not mentioned in the request will be unchanged. Update fails if the specified container doesn't already exist. @@ -262,7 +262,7 @@ async def get( resource_group_name: str, account_name: str, container_name: str, - **kwargs + **kwargs: Any ) -> "_models.BlobContainer": """Gets properties of a specified container. @@ -330,7 +330,7 @@ async def delete( resource_group_name: str, account_name: str, container_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes specified container under its account. @@ -394,7 +394,7 @@ async def set_legal_hold( account_name: str, container_name: str, legal_hold: "_models.LegalHold", - **kwargs + **kwargs: Any ) -> "_models.LegalHold": """Sets legal hold tags. Setting the same tag results in an idempotent operation. SetLegalHold follows an append pattern and does not clear out the existing tags that are not specified in @@ -472,7 +472,7 @@ async def clear_legal_hold( account_name: str, container_name: str, legal_hold: "_models.LegalHold", - **kwargs + **kwargs: Any ) -> "_models.LegalHold": """Clears legal hold tags. Clearing the same or non-existent tag results in an idempotent operation. ClearLegalHold clears out only the specified tags in the request. @@ -550,7 +550,7 @@ async def create_or_update_immutability_policy( container_name: str, if_match: Optional[str] = None, parameters: Optional["_models.ImmutabilityPolicy"] = None, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Creates or updates an unlocked immutability policy. ETag in If-Match is honored if given but not required for this operation. @@ -641,7 +641,7 @@ async def get_immutability_policy( account_name: str, container_name: str, if_match: Optional[str] = None, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Gets the existing immutability policy along with the corresponding ETag in response headers and body. 
@@ -721,7 +721,7 @@ async def delete_immutability_policy( account_name: str, container_name: str, if_match: str, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Aborts an unlocked immutability policy. The response of delete has immutabilityPeriodSinceCreationInDays set to 0. ETag in If-Match is required for this @@ -802,7 +802,7 @@ async def lock_immutability_policy( account_name: str, container_name: str, if_match: str, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Sets the ImmutabilityPolicy to Locked state. The only action allowed on a Locked policy is ExtendImmutabilityPolicy action. ETag in If-Match is required for this operation. @@ -880,7 +880,7 @@ async def extend_immutability_policy( container_name: str, if_match: str, parameters: Optional["_models.ImmutabilityPolicy"] = None, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Extends the immutabilityPeriodSinceCreationInDays of a locked immutabilityPolicy. The only action allowed on a Locked policy will be this action. ETag in If-Match is required for this @@ -969,7 +969,7 @@ async def lease( account_name: str, container_name: str, parameters: Optional["_models.LeaseContainerRequest"] = None, - **kwargs + **kwargs: Any ) -> "_models.LeaseContainerResponse": """The Lease Container operation establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/aio/operations/_blob_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/aio/operations/_blob_services_operations.py index 6fa8f697c590..fc13dc5fd68d 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/aio/operations/_blob_services_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/aio/operations/_blob_services_operations.py @@ -45,7 +45,7 @@ async def set_service_properties( resource_group_name: str, account_name: str, parameters: "_models.BlobServiceProperties", - **kwargs + **kwargs: Any ) -> "_models.BlobServiceProperties": """Sets the properties of a storage account’s Blob service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. @@ -117,7 +117,7 @@ async def get_service_properties( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.BlobServiceProperties": """Gets the properties of a storage account’s Blob service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/aio/operations/_management_policies_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/aio/operations/_management_policies_operations.py index 2ad069b7d639..57e4c676eebc 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/aio/operations/_management_policies_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/aio/operations/_management_policies_operations.py @@ -45,7 +45,7 @@ async def get( resource_group_name: str, account_name: str, management_policy_name: Union[str, "_models.ManagementPolicyName"], - **kwargs + **kwargs: Any ) -> "_models.StorageAccountManagementPolicies": """Gets the data policy rules associated with the specified storage account. 
@@ -112,7 +112,7 @@ async def create_or_update( account_name: str, management_policy_name: Union[str, "_models.ManagementPolicyName"], properties: "_models.ManagementPoliciesRulesSetParameter", - **kwargs + **kwargs: Any ) -> "_models.StorageAccountManagementPolicies": """Sets the data policy rules associated with the specified storage account. @@ -185,7 +185,7 @@ async def delete( resource_group_name: str, account_name: str, management_policy_name: Union[str, "_models.ManagementPolicyName"], - **kwargs + **kwargs: Any ) -> None: """Deletes the data policy rules associated with the specified storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/aio/operations/_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/aio/operations/_operations.py index a899d4a9d237..8f374fe94387 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/aio/operations/_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/aio/operations/_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.OperationListResult"]: """Lists all of the available Storage Rest API operations. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/aio/operations/_skus_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/aio/operations/_skus_operations.py index fcfd30590798..a79667a98862 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/aio/operations/_skus_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/aio/operations/_skus_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageSkuListResult"]: """Lists the available SKUs supported by Microsoft.Storage for given subscription. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/aio/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/aio/operations/_storage_accounts_operations.py index a7034a976d25..4ec8dc6ea00f 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/aio/operations/_storage_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/aio/operations/_storage_accounts_operations.py @@ -46,7 +46,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: async def check_name_availability( self, account_name: "_models.StorageAccountCheckNameAvailabilityParameters", - **kwargs + **kwargs: Any ) -> "_models.CheckNameAvailabilityResult": """Checks that the storage account name is valid and is not already in use. @@ -108,7 +108,7 @@ async def _create_initial( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountCreateParameters", - **kwargs + **kwargs: Any ) -> Optional["_models.StorageAccount"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.StorageAccount"]] error_map = { @@ -163,7 +163,7 @@ async def begin_create( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountCreateParameters", - **kwargs + **kwargs: Any ) -> AsyncLROPoller["_models.StorageAccount"]: """Asynchronously creates a new storage account with the specified parameters. 
If an account is already created and a subsequent create request is issued with different properties, the @@ -181,8 +181,8 @@ async def begin_create( :type parameters: ~azure.mgmt.storage.v2018_07_01.models.StorageAccountCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either StorageAccount or the result of cls(response) @@ -239,7 +239,7 @@ async def delete( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes a storage account in Microsoft Azure. @@ -296,7 +296,7 @@ async def get_properties( resource_group_name: str, account_name: str, expand: Optional[str] = "geoReplicationStats", - **kwargs + **kwargs: Any ) -> "_models.StorageAccount": """Returns the properties for the specified storage account including but not limited to name, SKU name, location, and account status. The ListKeys operation should be used to retrieve storage @@ -365,7 +365,7 @@ async def update( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountUpdateParameters", - **kwargs + **kwargs: Any ) -> "_models.StorageAccount": """The update operation can be used to update the SKU, encryption, access tier, or tags for a storage account. It can also be used to map the account to a custom domain. Only one custom @@ -438,7 +438,7 @@ async def update( def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageAccountListResult"]: """Lists all the storage accounts available under the subscription. Note that storage keys are not returned; use the ListKeys operation for this. @@ -506,7 +506,7 @@ async def get_next(next_link=None): def list_by_resource_group( self, resource_group_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageAccountListResult"]: """Lists all the storage accounts available under the given resource group. Note that storage keys are not returned; use the ListKeys operation for this. @@ -579,7 +579,7 @@ async def list_keys( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.StorageAccountListKeysResult": """Lists the access keys for the specified storage account. @@ -641,7 +641,7 @@ async def regenerate_key( resource_group_name: str, account_name: str, regenerate_key: "_models.StorageAccountRegenerateKeyParameters", - **kwargs + **kwargs: Any ) -> "_models.StorageAccountListKeysResult": """Regenerates one of the access keys for the specified storage account. @@ -710,7 +710,7 @@ async def list_account_sas( resource_group_name: str, account_name: str, parameters: "_models.AccountSasParameters", - **kwargs + **kwargs: Any ) -> "_models.ListAccountSasResponse": """List SAS credentials of a storage account. 
@@ -779,7 +779,7 @@ async def list_service_sas( resource_group_name: str, account_name: str, parameters: "_models.ServiceSasParameters", - **kwargs + **kwargs: Any ) -> "_models.ListServiceSasResponse": """List service SAS credentials of a specific resource. @@ -847,7 +847,7 @@ async def _failover_initial( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { @@ -889,7 +889,7 @@ async def begin_failover( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncLROPoller[None]: """Failover request can be triggered for a storage account in case of availability issues. The failover occurs from the storage account's primary cluster to secondary cluster for RA-GRS @@ -904,8 +904,8 @@ async def begin_failover( :type account_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/aio/operations/_usages_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/aio/operations/_usages_operations.py index 1f067a6c2b6b..b6dc43fd9328 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/aio/operations/_usages_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/aio/operations/_usages_operations.py @@ -44,7 +44,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list_by_location( self, location: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.UsageListResult"]: """Gets the current usage count and the limit for the resources of the location under the subscription. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/models/_models.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/models/_models.py index b473b061bc91..ea1e6fc66306 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/models/_models.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/models/_models.py @@ -698,7 +698,7 @@ class Identity(msrest.serialization.Model): :vartype principal_id: str :ivar tenant_id: The tenant ID of resource. :vartype tenant_id: str - :ivar type: Required. The identity type. Default value: "SystemAssigned". + :ivar type: The identity type. Has constant value: "SystemAssigned". :vartype type: str """ @@ -821,20 +821,18 @@ def __init__( class IPRule(msrest.serialization.Model): """IP rule with specific IP or IP range in CIDR format. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param ip_address_or_range: Required. 
Specifies the IP or IP range in CIDR format. Only IPV4 address is allowed. :type ip_address_or_range: str - :ivar action: The action of IP ACL rule. Default value: "Allow". - :vartype action: str + :param action: The action of IP ACL rule. The only acceptable values to pass in are None and + "Allow". The default value is None. + :type action: str """ _validation = { 'ip_address_or_range': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -842,14 +840,13 @@ class IPRule(msrest.serialization.Model): 'action': {'key': 'action', 'type': 'str'}, } - action = "Allow" - def __init__( self, **kwargs ): super(IPRule, self).__init__(**kwargs) self.ip_address_or_range = kwargs['ip_address_or_range'] + self.action = kwargs.get('action', None) class KeyVaultProperties(msrest.serialization.Model): @@ -1186,11 +1183,11 @@ class ManagementPoliciesRules(msrest.serialization.Model): :param policy: The Storage Account ManagementPolicies Rules, in JSON format. See more details in: https://docs.microsoft.com/en-us/azure/storage/common/storage-lifecycle-managment-concepts. - :type policy: str + :type policy: any """ _attribute_map = { - 'policy': {'key': 'policy', 'type': 'str'}, + 'policy': {'key': 'policy', 'type': 'object'}, } def __init__( @@ -1206,11 +1203,11 @@ class ManagementPoliciesRulesSetParameter(msrest.serialization.Model): :param policy: The Storage Account ManagementPolicies Rules, in JSON format. See more details in: https://docs.microsoft.com/en-us/azure/storage/common/storage-lifecycle-managment-concepts. - :type policy: str + :type policy: any """ _attribute_map = { - 'policy': {'key': 'properties.policy', 'type': 'str'}, + 'policy': {'key': 'properties.policy', 'type': 'object'}, } def __init__( @@ -1861,7 +1858,7 @@ class StorageAccountCheckNameAvailabilityParameters(msrest.serialization.Model): :param name: Required. The storage account name. :type name: str - :ivar type: Required. The type of resource, Microsoft.Storage/storageAccounts. Default value: + :ivar type: The type of resource, Microsoft.Storage/storageAccounts. Has constant value: "Microsoft.Storage/storageAccounts". :vartype type: str """ @@ -2071,7 +2068,7 @@ class StorageAccountManagementPolicies(Resource): :vartype type: str :param policy: The Storage Account ManagementPolicies Rules, in JSON format. See more details in: https://docs.microsoft.com/en-us/azure/storage/common/storage-lifecycle-managment-concepts. - :type policy: str + :type policy: any :ivar last_modified_time: Returns the date and time the ManagementPolicies was last modified. :vartype last_modified_time: ~datetime.datetime """ @@ -2087,7 +2084,7 @@ class StorageAccountManagementPolicies(Resource): 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'policy': {'key': 'properties.policy', 'type': 'str'}, + 'policy': {'key': 'properties.policy', 'type': 'object'}, 'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'iso-8601'}, } @@ -2107,7 +2104,7 @@ class StorageAccountManagementPoliciesRulesProperty(ManagementPoliciesRules): :param policy: The Storage Account ManagementPolicies Rules, in JSON format. See more details in: https://docs.microsoft.com/en-us/azure/storage/common/storage-lifecycle-managment-concepts. - :type policy: str + :type policy: any :ivar last_modified_time: Returns the date and time the ManagementPolicies was last modified. 
:vartype last_modified_time: ~datetime.datetime """ @@ -2117,7 +2114,7 @@ class StorageAccountManagementPoliciesRulesProperty(ManagementPoliciesRules): } _attribute_map = { - 'policy': {'key': 'policy', 'type': 'str'}, + 'policy': {'key': 'policy', 'type': 'object'}, 'last_modified_time': {'key': 'lastModifiedTime', 'type': 'iso-8601'}, } @@ -2438,15 +2435,14 @@ def __init__( class VirtualNetworkRule(msrest.serialization.Model): """Virtual Network rule. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param virtual_network_resource_id: Required. Resource ID of a subnet, for example: /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}. :type virtual_network_resource_id: str - :ivar action: The action of virtual network rule. Default value: "Allow". - :vartype action: str + :param action: The action of virtual network rule. The only acceptable values to pass in are + None and "Allow". The default value is None. + :type action: str :param state: Gets the state of virtual network rule. Possible values include: "provisioning", "deprovisioning", "succeeded", "failed", "networkSourceDeleted". :type state: str or ~azure.mgmt.storage.v2018_07_01.models.State @@ -2454,7 +2450,6 @@ class VirtualNetworkRule(msrest.serialization.Model): _validation = { 'virtual_network_resource_id': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -2463,12 +2458,11 @@ class VirtualNetworkRule(msrest.serialization.Model): 'state': {'key': 'state', 'type': 'str'}, } - action = "Allow" - def __init__( self, **kwargs ): super(VirtualNetworkRule, self).__init__(**kwargs) self.virtual_network_resource_id = kwargs['virtual_network_resource_id'] + self.action = kwargs.get('action', None) self.state = kwargs.get('state', None) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/models/_models_py3.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/models/_models_py3.py index 23c38e1f31f8..6131ef6b9580 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/models/_models_py3.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/models/_models_py3.py @@ -7,7 +7,7 @@ # -------------------------------------------------------------------------- import datetime -from typing import Dict, List, Optional, Union +from typing import Any, Dict, List, Optional, Union import msrest.serialization @@ -745,7 +745,7 @@ class Identity(msrest.serialization.Model): :vartype principal_id: str :ivar tenant_id: The tenant ID of resource. :vartype tenant_id: str - :ivar type: Required. The identity type. Default value: "SystemAssigned". + :ivar type: The identity type. Has constant value: "SystemAssigned". :vartype type: str """ @@ -872,20 +872,18 @@ def __init__( class IPRule(msrest.serialization.Model): """IP rule with specific IP or IP range in CIDR format. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param ip_address_or_range: Required. Specifies the IP or IP range in CIDR format. Only IPV4 address is allowed. :type ip_address_or_range: str - :ivar action: The action of IP ACL rule. Default value: "Allow". - :vartype action: str + :param action: The action of IP ACL rule. The only acceptable values to pass in are None and + "Allow". 
The default value is None. + :type action: str """ _validation = { 'ip_address_or_range': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -893,16 +891,16 @@ class IPRule(msrest.serialization.Model): 'action': {'key': 'action', 'type': 'str'}, } - action = "Allow" - def __init__( self, *, ip_address_or_range: str, + action: Optional[str] = None, **kwargs ): super(IPRule, self).__init__(**kwargs) self.ip_address_or_range = ip_address_or_range + self.action = action class KeyVaultProperties(msrest.serialization.Model): @@ -1261,17 +1259,17 @@ class ManagementPoliciesRules(msrest.serialization.Model): :param policy: The Storage Account ManagementPolicies Rules, in JSON format. See more details in: https://docs.microsoft.com/en-us/azure/storage/common/storage-lifecycle-managment-concepts. - :type policy: str + :type policy: any """ _attribute_map = { - 'policy': {'key': 'policy', 'type': 'str'}, + 'policy': {'key': 'policy', 'type': 'object'}, } def __init__( self, *, - policy: Optional[str] = None, + policy: Optional[Any] = None, **kwargs ): super(ManagementPoliciesRules, self).__init__(**kwargs) @@ -1283,17 +1281,17 @@ class ManagementPoliciesRulesSetParameter(msrest.serialization.Model): :param policy: The Storage Account ManagementPolicies Rules, in JSON format. See more details in: https://docs.microsoft.com/en-us/azure/storage/common/storage-lifecycle-managment-concepts. - :type policy: str + :type policy: any """ _attribute_map = { - 'policy': {'key': 'properties.policy', 'type': 'str'}, + 'policy': {'key': 'properties.policy', 'type': 'object'}, } def __init__( self, *, - policy: Optional[str] = None, + policy: Optional[Any] = None, **kwargs ): super(ManagementPoliciesRulesSetParameter, self).__init__(**kwargs) @@ -2003,7 +2001,7 @@ class StorageAccountCheckNameAvailabilityParameters(msrest.serialization.Model): :param name: Required. The storage account name. :type name: str - :ivar type: Required. The type of resource, Microsoft.Storage/storageAccounts. Default value: + :ivar type: The type of resource, Microsoft.Storage/storageAccounts. Has constant value: "Microsoft.Storage/storageAccounts". :vartype type: str """ @@ -2228,7 +2226,7 @@ class StorageAccountManagementPolicies(Resource): :vartype type: str :param policy: The Storage Account ManagementPolicies Rules, in JSON format. See more details in: https://docs.microsoft.com/en-us/azure/storage/common/storage-lifecycle-managment-concepts. - :type policy: str + :type policy: any :ivar last_modified_time: Returns the date and time the ManagementPolicies was last modified. :vartype last_modified_time: ~datetime.datetime """ @@ -2244,14 +2242,14 @@ class StorageAccountManagementPolicies(Resource): 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'policy': {'key': 'properties.policy', 'type': 'str'}, + 'policy': {'key': 'properties.policy', 'type': 'object'}, 'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'iso-8601'}, } def __init__( self, *, - policy: Optional[str] = None, + policy: Optional[Any] = None, **kwargs ): super(StorageAccountManagementPolicies, self).__init__(**kwargs) @@ -2266,7 +2264,7 @@ class StorageAccountManagementPoliciesRulesProperty(ManagementPoliciesRules): :param policy: The Storage Account ManagementPolicies Rules, in JSON format. See more details in: https://docs.microsoft.com/en-us/azure/storage/common/storage-lifecycle-managment-concepts. 
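Because `policy` is now serialized as `object` rather than `str`, the lifecycle rules can be supplied as a plain dict instead of a pre-serialized JSON string. A sketch with an illustrative rule body (the exact schema comes from the lifecycle-management article linked in the docstring, not from this example):

```py
from azure.mgmt.storage.v2018_07_01.models import ManagementPoliciesRulesSetParameter

# Passing a dict directly; previously the same content had to be handed over as a string.
properties = ManagementPoliciesRulesSetParameter(
    policy={
        "version": "0.5",
        "rules": [
            {
                "name": "age-off-logs",  # illustrative rule name
                "type": "Lifecycle",
                "definition": {
                    "filters": {"blobTypes": ["blockBlob"], "prefixMatch": ["logs/"]},
                    "actions": {
                        "baseBlob": {"delete": {"daysAfterModificationGreaterThan": 365}}
                    },
                },
            }
        ],
    }
)

# This is the `properties` argument that management_policies.create_or_update expects
# (with management_policy_name="default"), per the operation signatures earlier in this diff.
```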
- :type policy: str + :type policy: any :ivar last_modified_time: Returns the date and time the ManagementPolicies was last modified. :vartype last_modified_time: ~datetime.datetime """ @@ -2276,14 +2274,14 @@ class StorageAccountManagementPoliciesRulesProperty(ManagementPoliciesRules): } _attribute_map = { - 'policy': {'key': 'policy', 'type': 'str'}, + 'policy': {'key': 'policy', 'type': 'object'}, 'last_modified_time': {'key': 'lastModifiedTime', 'type': 'iso-8601'}, } def __init__( self, *, - policy: Optional[str] = None, + policy: Optional[Any] = None, **kwargs ): super(StorageAccountManagementPoliciesRulesProperty, self).__init__(policy=policy, **kwargs) @@ -2614,15 +2612,14 @@ def __init__( class VirtualNetworkRule(msrest.serialization.Model): """Virtual Network rule. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param virtual_network_resource_id: Required. Resource ID of a subnet, for example: /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}. :type virtual_network_resource_id: str - :ivar action: The action of virtual network rule. Default value: "Allow". - :vartype action: str + :param action: The action of virtual network rule. The only acceptable values to pass in are + None and "Allow". The default value is None. + :type action: str :param state: Gets the state of virtual network rule. Possible values include: "provisioning", "deprovisioning", "succeeded", "failed", "networkSourceDeleted". :type state: str or ~azure.mgmt.storage.v2018_07_01.models.State @@ -2630,7 +2627,6 @@ class VirtualNetworkRule(msrest.serialization.Model): _validation = { 'virtual_network_resource_id': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -2639,15 +2635,15 @@ class VirtualNetworkRule(msrest.serialization.Model): 'state': {'key': 'state', 'type': 'str'}, } - action = "Allow" - def __init__( self, *, virtual_network_resource_id: str, + action: Optional[str] = None, state: Optional[Union[str, "State"]] = None, **kwargs ): super(VirtualNetworkRule, self).__init__(**kwargs) self.virtual_network_resource_id = virtual_network_resource_id + self.action = action self.state = state diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/operations/_storage_accounts_operations.py index 077665035d98..d0027934c406 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/operations/_storage_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_07_01/operations/_storage_accounts_operations.py @@ -188,8 +188,8 @@ def begin_create( :type parameters: ~azure.mgmt.storage.v2018_07_01.models.StorageAccountCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the ARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. 
:paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either StorageAccount or the result of cls(response) @@ -922,8 +922,8 @@ def begin_failover( :type account_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the ARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/_version.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/_version.py index 8dae91701610..232662316d4d 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/_version.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "18.0.0" +VERSION = "19.0.0" diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/aio/operations/_blob_containers_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/aio/operations/_blob_containers_operations.py index 9cff9d8acdb4..1940f65d2bea 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/aio/operations/_blob_containers_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/aio/operations/_blob_containers_operations.py @@ -44,7 +44,7 @@ async def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.ListContainerItems": """Lists all containers and does not support a prefix like data plane. Also SRP today does not return continuation token. @@ -108,7 +108,7 @@ async def create( account_name: str, container_name: str, blob_container: "_models.BlobContainer", - **kwargs + **kwargs: Any ) -> "_models.BlobContainer": """Creates a new container under the specified account as described by request body. The container resource includes metadata and properties for that container. It does not include a list of the @@ -190,7 +190,7 @@ async def update( account_name: str, container_name: str, blob_container: "_models.BlobContainer", - **kwargs + **kwargs: Any ) -> "_models.BlobContainer": """Updates container properties as specified in request body. Properties not mentioned in the request will be unchanged. Update fails if the specified container doesn't already exist. @@ -266,7 +266,7 @@ async def get( resource_group_name: str, account_name: str, container_name: str, - **kwargs + **kwargs: Any ) -> "_models.BlobContainer": """Gets properties of a specified container. 
@@ -334,7 +334,7 @@ async def delete( resource_group_name: str, account_name: str, container_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes specified container under its account. @@ -398,7 +398,7 @@ async def set_legal_hold( account_name: str, container_name: str, legal_hold: "_models.LegalHold", - **kwargs + **kwargs: Any ) -> "_models.LegalHold": """Sets legal hold tags. Setting the same tag results in an idempotent operation. SetLegalHold follows an append pattern and does not clear out the existing tags that are not specified in @@ -476,7 +476,7 @@ async def clear_legal_hold( account_name: str, container_name: str, legal_hold: "_models.LegalHold", - **kwargs + **kwargs: Any ) -> "_models.LegalHold": """Clears legal hold tags. Clearing the same or non-existent tag results in an idempotent operation. ClearLegalHold clears out only the specified tags in the request. @@ -554,7 +554,7 @@ async def create_or_update_immutability_policy( container_name: str, if_match: Optional[str] = None, parameters: Optional["_models.ImmutabilityPolicy"] = None, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Creates or updates an unlocked immutability policy. ETag in If-Match is honored if given but not required for this operation. @@ -645,7 +645,7 @@ async def get_immutability_policy( account_name: str, container_name: str, if_match: Optional[str] = None, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Gets the existing immutability policy along with the corresponding ETag in response headers and body. @@ -725,7 +725,7 @@ async def delete_immutability_policy( account_name: str, container_name: str, if_match: str, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Aborts an unlocked immutability policy. The response of delete has immutabilityPeriodSinceCreationInDays set to 0. ETag in If-Match is required for this @@ -806,7 +806,7 @@ async def lock_immutability_policy( account_name: str, container_name: str, if_match: str, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Sets the ImmutabilityPolicy to Locked state. The only action allowed on a Locked policy is ExtendImmutabilityPolicy action. ETag in If-Match is required for this operation. @@ -884,7 +884,7 @@ async def extend_immutability_policy( container_name: str, if_match: str, parameters: Optional["_models.ImmutabilityPolicy"] = None, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Extends the immutabilityPeriodSinceCreationInDays of a locked immutabilityPolicy. The only action allowed on a Locked policy will be this action. ETag in If-Match is required for this @@ -973,7 +973,7 @@ async def lease( account_name: str, container_name: str, parameters: Optional["_models.LeaseContainerRequest"] = None, - **kwargs + **kwargs: Any ) -> "_models.LeaseContainerResponse": """The Lease Container operation establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. 
diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/aio/operations/_blob_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/aio/operations/_blob_services_operations.py index 42c05e27fbd6..7562fad33ec9 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/aio/operations/_blob_services_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/aio/operations/_blob_services_operations.py @@ -45,7 +45,7 @@ async def set_service_properties( resource_group_name: str, account_name: str, parameters: "_models.BlobServiceProperties", - **kwargs + **kwargs: Any ) -> "_models.BlobServiceProperties": """Sets the properties of a storage account’s Blob service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. @@ -117,7 +117,7 @@ async def get_service_properties( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.BlobServiceProperties": """Gets the properties of a storage account’s Blob service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/aio/operations/_management_policies_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/aio/operations/_management_policies_operations.py index 4cbf3f9ed304..b06335bcadfc 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/aio/operations/_management_policies_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/aio/operations/_management_policies_operations.py @@ -45,7 +45,7 @@ async def get( resource_group_name: str, account_name: str, management_policy_name: Union[str, "_models.ManagementPolicyName"], - **kwargs + **kwargs: Any ) -> "_models.ManagementPolicy": """Gets the managementpolicy associated with the specified storage account. @@ -112,7 +112,7 @@ async def create_or_update( account_name: str, management_policy_name: Union[str, "_models.ManagementPolicyName"], properties: "_models.ManagementPolicy", - **kwargs + **kwargs: Any ) -> "_models.ManagementPolicy": """Sets the managementpolicy to the specified storage account. @@ -185,7 +185,7 @@ async def delete( resource_group_name: str, account_name: str, management_policy_name: Union[str, "_models.ManagementPolicyName"], - **kwargs + **kwargs: Any ) -> None: """Deletes the managementpolicy associated with the specified storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/aio/operations/_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/aio/operations/_operations.py index 6c5ccf3719f3..7ddde1f8e9e9 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/aio/operations/_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/aio/operations/_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.OperationListResult"]: """Lists all of the available Storage Rest API operations. 
diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/aio/operations/_skus_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/aio/operations/_skus_operations.py index 56e3ff52482f..157f14924a9d 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/aio/operations/_skus_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/aio/operations/_skus_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageSkuListResult"]: """Lists the available SKUs supported by Microsoft.Storage for given subscription. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/aio/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/aio/operations/_storage_accounts_operations.py index d55200170f3c..6db372da1d6d 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/aio/operations/_storage_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/aio/operations/_storage_accounts_operations.py @@ -46,7 +46,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: async def check_name_availability( self, account_name: "_models.StorageAccountCheckNameAvailabilityParameters", - **kwargs + **kwargs: Any ) -> "_models.CheckNameAvailabilityResult": """Checks that the storage account name is valid and is not already in use. @@ -108,7 +108,7 @@ async def _create_initial( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountCreateParameters", - **kwargs + **kwargs: Any ) -> Optional["_models.StorageAccount"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.StorageAccount"]] error_map = { @@ -163,7 +163,7 @@ async def begin_create( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountCreateParameters", - **kwargs + **kwargs: Any ) -> AsyncLROPoller["_models.StorageAccount"]: """Asynchronously creates a new storage account with the specified parameters. If an account is already created and a subsequent create request is issued with different properties, the @@ -181,8 +181,8 @@ async def begin_create( :type parameters: ~azure.mgmt.storage.v2018_11_01.models.StorageAccountCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either StorageAccount or the result of cls(response) @@ -239,7 +239,7 @@ async def delete( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes a storage account in Microsoft Azure. 
@@ -296,7 +296,7 @@ async def get_properties( resource_group_name: str, account_name: str, expand: Optional[str] = "geoReplicationStats", - **kwargs + **kwargs: Any ) -> "_models.StorageAccount": """Returns the properties for the specified storage account including but not limited to name, SKU name, location, and account status. The ListKeys operation should be used to retrieve storage @@ -365,7 +365,7 @@ async def update( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountUpdateParameters", - **kwargs + **kwargs: Any ) -> "_models.StorageAccount": """The update operation can be used to update the SKU, encryption, access tier, or tags for a storage account. It can also be used to map the account to a custom domain. Only one custom @@ -438,7 +438,7 @@ async def update( def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageAccountListResult"]: """Lists all the storage accounts available under the subscription. Note that storage keys are not returned; use the ListKeys operation for this. @@ -506,7 +506,7 @@ async def get_next(next_link=None): def list_by_resource_group( self, resource_group_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageAccountListResult"]: """Lists all the storage accounts available under the given resource group. Note that storage keys are not returned; use the ListKeys operation for this. @@ -579,7 +579,7 @@ async def list_keys( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.StorageAccountListKeysResult": """Lists the access keys for the specified storage account. @@ -641,7 +641,7 @@ async def regenerate_key( resource_group_name: str, account_name: str, regenerate_key: "_models.StorageAccountRegenerateKeyParameters", - **kwargs + **kwargs: Any ) -> "_models.StorageAccountListKeysResult": """Regenerates one of the access keys for the specified storage account. @@ -710,7 +710,7 @@ async def list_account_sas( resource_group_name: str, account_name: str, parameters: "_models.AccountSasParameters", - **kwargs + **kwargs: Any ) -> "_models.ListAccountSasResponse": """List SAS credentials of a storage account. @@ -779,7 +779,7 @@ async def list_service_sas( resource_group_name: str, account_name: str, parameters: "_models.ServiceSasParameters", - **kwargs + **kwargs: Any ) -> "_models.ListServiceSasResponse": """List service SAS credentials of a specific resource. @@ -847,7 +847,7 @@ async def _failover_initial( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { @@ -889,7 +889,7 @@ async def begin_failover( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncLROPoller[None]: """Failover request can be triggered for a storage account in case of availability issues. The failover occurs from the storage account's primary cluster to secondary cluster for RA-GRS @@ -904,8 +904,8 @@ async def begin_failover( :type account_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. 
+ Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) @@ -958,7 +958,7 @@ async def revoke_user_delegation_keys( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> None: """Revoke user delegation keys. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/aio/operations/_usages_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/aio/operations/_usages_operations.py index 615ca2cde4c4..f5f99b4e06cf 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/aio/operations/_usages_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/aio/operations/_usages_operations.py @@ -44,7 +44,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list_by_location( self, location: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.UsageListResult"]: """Gets the current usage count and the limit for the resources of the location under the subscription. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/models/_models.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/models/_models.py index 27f9ab99baa6..f85be8e03a01 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/models/_models.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/models/_models.py @@ -750,7 +750,7 @@ class Identity(msrest.serialization.Model): :vartype principal_id: str :ivar tenant_id: The tenant ID of resource. :vartype tenant_id: str - :ivar type: Required. The identity type. Default value: "SystemAssigned". + :ivar type: The identity type. Has constant value: "SystemAssigned". :vartype type: str """ @@ -873,20 +873,18 @@ def __init__( class IPRule(msrest.serialization.Model): """IP rule with specific IP or IP range in CIDR format. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param ip_address_or_range: Required. Specifies the IP or IP range in CIDR format. Only IPV4 address is allowed. :type ip_address_or_range: str - :ivar action: The action of IP ACL rule. Default value: "Allow". - :vartype action: str + :param action: The action of IP ACL rule. The only acceptable values to pass in are None and + "Allow". The default value is None. + :type action: str """ _validation = { 'ip_address_or_range': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -894,14 +892,13 @@ class IPRule(msrest.serialization.Model): 'action': {'key': 'action', 'type': 'str'}, } - action = "Allow" - def __init__( self, **kwargs ): super(IPRule, self).__init__(**kwargs) self.ip_address_or_range = kwargs['ip_address_or_range'] + self.action = kwargs.get('action', None) class KeyVaultProperties(msrest.serialization.Model): @@ -2112,7 +2109,7 @@ class StorageAccountCheckNameAvailabilityParameters(msrest.serialization.Model): :param name: Required. The storage account name. :type name: str - :ivar type: Required. The type of resource, Microsoft.Storage/storageAccounts. 
Default value: + :ivar type: The type of resource, Microsoft.Storage/storageAccounts. Has constant value: "Microsoft.Storage/storageAccounts". :vartype type: str """ @@ -2616,15 +2613,14 @@ def __init__( class VirtualNetworkRule(msrest.serialization.Model): """Virtual Network rule. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param virtual_network_resource_id: Required. Resource ID of a subnet, for example: /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}. :type virtual_network_resource_id: str - :ivar action: The action of virtual network rule. Default value: "Allow". - :vartype action: str + :param action: The action of virtual network rule. The only acceptable values to pass in are + None and "Allow". The default value is None. + :type action: str :param state: Gets the state of virtual network rule. Possible values include: "provisioning", "deprovisioning", "succeeded", "failed", "networkSourceDeleted". :type state: str or ~azure.mgmt.storage.v2018_11_01.models.State @@ -2632,7 +2628,6 @@ class VirtualNetworkRule(msrest.serialization.Model): _validation = { 'virtual_network_resource_id': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -2641,12 +2636,11 @@ class VirtualNetworkRule(msrest.serialization.Model): 'state': {'key': 'state', 'type': 'str'}, } - action = "Allow" - def __init__( self, **kwargs ): super(VirtualNetworkRule, self).__init__(**kwargs) self.virtual_network_resource_id = kwargs['virtual_network_resource_id'] + self.action = kwargs.get('action', None) self.state = kwargs.get('state', None) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/models/_models_py3.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/models/_models_py3.py index feb844b3dad7..bf89657db351 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/models/_models_py3.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/models/_models_py3.py @@ -801,7 +801,7 @@ class Identity(msrest.serialization.Model): :vartype principal_id: str :ivar tenant_id: The tenant ID of resource. :vartype tenant_id: str - :ivar type: Required. The identity type. Default value: "SystemAssigned". + :ivar type: The identity type. Has constant value: "SystemAssigned". :vartype type: str """ @@ -928,20 +928,18 @@ def __init__( class IPRule(msrest.serialization.Model): """IP rule with specific IP or IP range in CIDR format. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param ip_address_or_range: Required. Specifies the IP or IP range in CIDR format. Only IPV4 address is allowed. :type ip_address_or_range: str - :ivar action: The action of IP ACL rule. Default value: "Allow". - :vartype action: str + :param action: The action of IP ACL rule. The only acceptable values to pass in are None and + "Allow". The default value is None. 
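
Since `action` is now an optional constructor argument rather than a class-level constant, a short sketch of building the relaxed rules follows, using the versioned models this hunk touches. Per the docstring above, only `None` or `"Allow"` should be passed; the resource IDs below are placeholders.

```py
# Sketch of the relaxed constructors from this hunk; resource IDs are placeholders.
from azure.mgmt.storage.v2018_11_01.models import IPRule, VirtualNetworkRule

default_rule = IPRule(ip_address_or_range="23.45.1.0/24")                # action stays None
allow_rule = IPRule(ip_address_or_range="23.45.2.0/24", action="Allow")  # explicit "Allow"

vnet_rule = VirtualNetworkRule(
    virtual_network_resource_id=(
        "/subscriptions/<sub-id>/resourceGroups/<group>/providers/"
        "Microsoft.Network/virtualNetworks/<vnet>/subnets/<subnet>"
    ),
    action="Allow",
)
```

In typical usage these rules feed the `ip_rules` and `virtual_network_rules` lists of a `NetworkRuleSet` on the account's create or update parameters.
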
+ :type action: str """ _validation = { 'ip_address_or_range': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -949,16 +947,16 @@ class IPRule(msrest.serialization.Model): 'action': {'key': 'action', 'type': 'str'}, } - action = "Allow" - def __init__( self, *, ip_address_or_range: str, + action: Optional[str] = None, **kwargs ): super(IPRule, self).__init__(**kwargs) self.ip_address_or_range = ip_address_or_range + self.action = action class KeyVaultProperties(msrest.serialization.Model): @@ -2278,7 +2276,7 @@ class StorageAccountCheckNameAvailabilityParameters(msrest.serialization.Model): :param name: Required. The storage account name. :type name: str - :ivar type: Required. The type of resource, Microsoft.Storage/storageAccounts. Default value: + :ivar type: The type of resource, Microsoft.Storage/storageAccounts. Has constant value: "Microsoft.Storage/storageAccounts". :vartype type: str """ @@ -2812,15 +2810,14 @@ def __init__( class VirtualNetworkRule(msrest.serialization.Model): """Virtual Network rule. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param virtual_network_resource_id: Required. Resource ID of a subnet, for example: /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}. :type virtual_network_resource_id: str - :ivar action: The action of virtual network rule. Default value: "Allow". - :vartype action: str + :param action: The action of virtual network rule. The only acceptable values to pass in are + None and "Allow". The default value is None. + :type action: str :param state: Gets the state of virtual network rule. Possible values include: "provisioning", "deprovisioning", "succeeded", "failed", "networkSourceDeleted". :type state: str or ~azure.mgmt.storage.v2018_11_01.models.State @@ -2828,7 +2825,6 @@ class VirtualNetworkRule(msrest.serialization.Model): _validation = { 'virtual_network_resource_id': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -2837,15 +2833,15 @@ class VirtualNetworkRule(msrest.serialization.Model): 'state': {'key': 'state', 'type': 'str'}, } - action = "Allow" - def __init__( self, *, virtual_network_resource_id: str, + action: Optional[str] = None, state: Optional[Union[str, "State"]] = None, **kwargs ): super(VirtualNetworkRule, self).__init__(**kwargs) self.virtual_network_resource_id = virtual_network_resource_id + self.action = action self.state = state diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/operations/_storage_accounts_operations.py index c57e491ea44a..a4ec5619f98f 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/operations/_storage_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_11_01/operations/_storage_accounts_operations.py @@ -188,8 +188,8 @@ def begin_create( :type parameters: ~azure.mgmt.storage.v2018_11_01.models.StorageAccountCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: Pass in True if you'd like the ARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either StorageAccount or the result of cls(response) @@ -922,8 +922,8 @@ def begin_failover( :type account_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the ARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/_version.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/_version.py index 8dae91701610..232662316d4d 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/_version.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "18.0.0" +VERSION = "19.0.0" diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_blob_containers_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_blob_containers_operations.py index f18a9bf8db40..8e2b43d5b6a9 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_blob_containers_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_blob_containers_operations.py @@ -48,7 +48,7 @@ def list( skip_token: Optional[str] = None, maxpagesize: Optional[str] = None, filter: Optional[str] = None, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.ListContainerItems"]: """Lists all containers and does not support a prefix like data plane. Also SRP today does not return continuation token. @@ -142,7 +142,7 @@ async def create( account_name: str, container_name: str, blob_container: "_models.BlobContainer", - **kwargs + **kwargs: Any ) -> "_models.BlobContainer": """Creates a new container under the specified account as described by request body. The container resource includes metadata and properties for that container. 
It does not include a list of the @@ -224,7 +224,7 @@ async def update( account_name: str, container_name: str, blob_container: "_models.BlobContainer", - **kwargs + **kwargs: Any ) -> "_models.BlobContainer": """Updates container properties as specified in request body. Properties not mentioned in the request will be unchanged. Update fails if the specified container doesn't already exist. @@ -300,7 +300,7 @@ async def get( resource_group_name: str, account_name: str, container_name: str, - **kwargs + **kwargs: Any ) -> "_models.BlobContainer": """Gets properties of a specified container. @@ -368,7 +368,7 @@ async def delete( resource_group_name: str, account_name: str, container_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes specified container under its account. @@ -432,7 +432,7 @@ async def set_legal_hold( account_name: str, container_name: str, legal_hold: "_models.LegalHold", - **kwargs + **kwargs: Any ) -> "_models.LegalHold": """Sets legal hold tags. Setting the same tag results in an idempotent operation. SetLegalHold follows an append pattern and does not clear out the existing tags that are not specified in @@ -510,7 +510,7 @@ async def clear_legal_hold( account_name: str, container_name: str, legal_hold: "_models.LegalHold", - **kwargs + **kwargs: Any ) -> "_models.LegalHold": """Clears legal hold tags. Clearing the same or non-existent tag results in an idempotent operation. ClearLegalHold clears out only the specified tags in the request. @@ -588,7 +588,7 @@ async def create_or_update_immutability_policy( container_name: str, if_match: Optional[str] = None, parameters: Optional["_models.ImmutabilityPolicy"] = None, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Creates or updates an unlocked immutability policy. ETag in If-Match is honored if given but not required for this operation. @@ -679,7 +679,7 @@ async def get_immutability_policy( account_name: str, container_name: str, if_match: Optional[str] = None, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Gets the existing immutability policy along with the corresponding ETag in response headers and body. @@ -759,7 +759,7 @@ async def delete_immutability_policy( account_name: str, container_name: str, if_match: str, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Aborts an unlocked immutability policy. The response of delete has immutabilityPeriodSinceCreationInDays set to 0. ETag in If-Match is required for this @@ -840,7 +840,7 @@ async def lock_immutability_policy( account_name: str, container_name: str, if_match: str, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Sets the ImmutabilityPolicy to Locked state. The only action allowed on a Locked policy is ExtendImmutabilityPolicy action. ETag in If-Match is required for this operation. @@ -918,7 +918,7 @@ async def extend_immutability_policy( container_name: str, if_match: str, parameters: Optional["_models.ImmutabilityPolicy"] = None, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Extends the immutabilityPeriodSinceCreationInDays of a locked immutabilityPolicy. The only action allowed on a Locked policy will be this action. ETag in If-Match is required for this @@ -1007,7 +1007,7 @@ async def lease( account_name: str, container_name: str, parameters: Optional["_models.LeaseContainerRequest"] = None, - **kwargs + **kwargs: Any ) -> "_models.LeaseContainerResponse": """The Lease Container operation establishes and manages a lock on a container for delete operations. 
The lock duration can be 15 to 60 seconds, or can be infinite. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_blob_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_blob_services_operations.py index 0b83f49e20a7..96d7966352d6 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_blob_services_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_blob_services_operations.py @@ -45,7 +45,7 @@ def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.BlobServiceItems"]: """List blob services of storage account. It returns a collection of one object named default. @@ -123,7 +123,7 @@ async def set_service_properties( resource_group_name: str, account_name: str, parameters: "_models.BlobServiceProperties", - **kwargs + **kwargs: Any ) -> "_models.BlobServiceProperties": """Sets the properties of a storage account’s Blob service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. @@ -195,7 +195,7 @@ async def get_service_properties( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.BlobServiceProperties": """Gets the properties of a storage account’s Blob service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_file_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_file_services_operations.py index 273ed725bbf0..b7aaf2ea2584 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_file_services_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_file_services_operations.py @@ -44,7 +44,7 @@ async def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.FileServiceItems": """List all file services in storage accounts. @@ -106,7 +106,7 @@ async def set_service_properties( resource_group_name: str, account_name: str, parameters: "_models.FileServiceProperties", - **kwargs + **kwargs: Any ) -> "_models.FileServiceProperties": """Sets the properties of file services in storage accounts, including CORS (Cross-Origin Resource Sharing) rules. @@ -178,7 +178,7 @@ async def get_service_properties( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.FileServiceProperties": """Gets the properties of file services in storage accounts, including CORS (Cross-Origin Resource Sharing) rules. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_file_shares_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_file_shares_operations.py index 0bb360c3a307..0e341fb080e8 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_file_shares_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_file_shares_operations.py @@ -48,7 +48,7 @@ def list( skip_token: Optional[str] = None, maxpagesize: Optional[str] = None, filter: Optional[str] = None, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.FileShareItems"]: """Lists all shares. 
@@ -141,7 +141,7 @@ async def create( account_name: str, share_name: str, file_share: "_models.FileShare", - **kwargs + **kwargs: Any ) -> "_models.FileShare": """Creates a new share under the specified account as described by request body. The share resource includes metadata and properties for that share. It does not include a list of the @@ -223,7 +223,7 @@ async def update( account_name: str, share_name: str, file_share: "_models.FileShare", - **kwargs + **kwargs: Any ) -> "_models.FileShare": """Updates share properties as specified in request body. Properties not mentioned in the request will not be changed. Update fails if the specified share does not already exist. @@ -299,7 +299,7 @@ async def get( resource_group_name: str, account_name: str, share_name: str, - **kwargs + **kwargs: Any ) -> "_models.FileShare": """Gets properties of a specified share. @@ -367,7 +367,7 @@ async def delete( resource_group_name: str, account_name: str, share_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes specified share under its account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_management_policies_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_management_policies_operations.py index 5f4d0b075d8f..9589da58dbde 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_management_policies_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_management_policies_operations.py @@ -45,7 +45,7 @@ async def get( resource_group_name: str, account_name: str, management_policy_name: Union[str, "_models.ManagementPolicyName"], - **kwargs + **kwargs: Any ) -> "_models.ManagementPolicy": """Gets the managementpolicy associated with the specified storage account. @@ -112,7 +112,7 @@ async def create_or_update( account_name: str, management_policy_name: Union[str, "_models.ManagementPolicyName"], properties: "_models.ManagementPolicy", - **kwargs + **kwargs: Any ) -> "_models.ManagementPolicy": """Sets the managementpolicy to the specified storage account. @@ -185,7 +185,7 @@ async def delete( resource_group_name: str, account_name: str, management_policy_name: Union[str, "_models.ManagementPolicyName"], - **kwargs + **kwargs: Any ) -> None: """Deletes the managementpolicy associated with the specified storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_operations.py index f5f819cd8278..03dfdc2e6bd9 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.OperationListResult"]: """Lists all of the available Storage Rest API operations. 
diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_skus_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_skus_operations.py index c43bad98990c..f9f5d92a97dc 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_skus_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_skus_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageSkuListResult"]: """Lists the available SKUs supported by Microsoft.Storage for given subscription. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_storage_accounts_operations.py index 5d5785c037f1..b401f6df462d 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_storage_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_storage_accounts_operations.py @@ -46,7 +46,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: async def check_name_availability( self, account_name: "_models.StorageAccountCheckNameAvailabilityParameters", - **kwargs + **kwargs: Any ) -> "_models.CheckNameAvailabilityResult": """Checks that the storage account name is valid and is not already in use. @@ -108,7 +108,7 @@ async def _create_initial( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountCreateParameters", - **kwargs + **kwargs: Any ) -> Optional["_models.StorageAccount"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.StorageAccount"]] error_map = { @@ -163,7 +163,7 @@ async def begin_create( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountCreateParameters", - **kwargs + **kwargs: Any ) -> AsyncLROPoller["_models.StorageAccount"]: """Asynchronously creates a new storage account with the specified parameters. If an account is already created and a subsequent create request is issued with different properties, the @@ -181,8 +181,8 @@ async def begin_create( :type parameters: ~azure.mgmt.storage.v2019_04_01.models.StorageAccountCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either StorageAccount or the result of cls(response) @@ -239,7 +239,7 @@ async def delete( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes a storage account in Microsoft Azure. 
@@ -296,7 +296,7 @@ async def get_properties( resource_group_name: str, account_name: str, expand: Optional[str] = "geoReplicationStats", - **kwargs + **kwargs: Any ) -> "_models.StorageAccount": """Returns the properties for the specified storage account including but not limited to name, SKU name, location, and account status. The ListKeys operation should be used to retrieve storage @@ -365,7 +365,7 @@ async def update( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountUpdateParameters", - **kwargs + **kwargs: Any ) -> "_models.StorageAccount": """The update operation can be used to update the SKU, encryption, access tier, or tags for a storage account. It can also be used to map the account to a custom domain. Only one custom @@ -438,7 +438,7 @@ async def update( def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageAccountListResult"]: """Lists all the storage accounts available under the subscription. Note that storage keys are not returned; use the ListKeys operation for this. @@ -506,7 +506,7 @@ async def get_next(next_link=None): def list_by_resource_group( self, resource_group_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageAccountListResult"]: """Lists all the storage accounts available under the given resource group. Note that storage keys are not returned; use the ListKeys operation for this. @@ -580,7 +580,7 @@ async def list_keys( resource_group_name: str, account_name: str, expand: Optional[str] = "kerb", - **kwargs + **kwargs: Any ) -> "_models.StorageAccountListKeysResult": """Lists the access keys or Kerberos keys (if active directory enabled) for the specified storage account. @@ -647,7 +647,7 @@ async def regenerate_key( resource_group_name: str, account_name: str, regenerate_key: "_models.StorageAccountRegenerateKeyParameters", - **kwargs + **kwargs: Any ) -> "_models.StorageAccountListKeysResult": """Regenerates one of the access keys or Kerberos keys for the specified storage account. @@ -717,7 +717,7 @@ async def list_account_sas( resource_group_name: str, account_name: str, parameters: "_models.AccountSasParameters", - **kwargs + **kwargs: Any ) -> "_models.ListAccountSasResponse": """List SAS credentials of a storage account. @@ -786,7 +786,7 @@ async def list_service_sas( resource_group_name: str, account_name: str, parameters: "_models.ServiceSasParameters", - **kwargs + **kwargs: Any ) -> "_models.ListServiceSasResponse": """List service SAS credentials of a specific resource. @@ -854,7 +854,7 @@ async def _failover_initial( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { @@ -896,7 +896,7 @@ async def begin_failover( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncLROPoller[None]: """Failover request can be triggered for a storage account in case of availability issues. The failover occurs from the storage account's primary cluster to secondary cluster for RA-GRS @@ -911,8 +911,8 @@ async def begin_failover( :type account_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. 
+ :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) @@ -965,7 +965,7 @@ async def revoke_user_delegation_keys( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> None: """Revoke user delegation keys. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_usages_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_usages_operations.py index 58d6fe413f70..f962a6c9f6f1 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_usages_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_usages_operations.py @@ -44,7 +44,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list_by_location( self, location: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.UsageListResult"]: """Gets the current usage count and the limit for the resources of the location under the subscription. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/models/_models.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/models/_models.py index f6c657b18dc4..e743f1ef8fcb 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/models/_models.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/models/_models.py @@ -1121,7 +1121,7 @@ class Identity(msrest.serialization.Model): :vartype principal_id: str :ivar tenant_id: The tenant ID of resource. :vartype tenant_id: str - :ivar type: Required. The identity type. Default value: "SystemAssigned". + :ivar type: The identity type. Has constant value: "SystemAssigned". :vartype type: str """ @@ -1244,20 +1244,18 @@ def __init__( class IPRule(msrest.serialization.Model): """IP rule with specific IP or IP range in CIDR format. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param ip_address_or_range: Required. Specifies the IP or IP range in CIDR format. Only IPV4 address is allowed. :type ip_address_or_range: str - :ivar action: The action of IP ACL rule. Default value: "Allow". - :vartype action: str + :param action: The action of IP ACL rule. The only acceptable values to pass in are None and + "Allow". The default value is None. + :type action: str """ _validation = { 'ip_address_or_range': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -1265,14 +1263,13 @@ class IPRule(msrest.serialization.Model): 'action': {'key': 'action', 'type': 'str'}, } - action = "Allow" - def __init__( self, **kwargs ): super(IPRule, self).__init__(**kwargs) self.ip_address_or_range = kwargs['ip_address_or_range'] + self.action = kwargs.get('action', None) class KeyVaultProperties(msrest.serialization.Model): @@ -2521,7 +2518,7 @@ class StorageAccountCheckNameAvailabilityParameters(msrest.serialization.Model): :param name: Required. The storage account name. :type name: str - :ivar type: Required. 
The type of resource, Microsoft.Storage/storageAccounts. Default value: + :ivar type: The type of resource, Microsoft.Storage/storageAccounts. Has constant value: "Microsoft.Storage/storageAccounts". :vartype type: str """ @@ -3082,15 +3079,14 @@ def __init__( class VirtualNetworkRule(msrest.serialization.Model): """Virtual Network rule. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param virtual_network_resource_id: Required. Resource ID of a subnet, for example: /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}. :type virtual_network_resource_id: str - :ivar action: The action of virtual network rule. Default value: "Allow". - :vartype action: str + :param action: The action of virtual network rule. The only acceptable values to pass in are + None and "Allow". The default value is None. + :type action: str :param state: Gets the state of virtual network rule. Possible values include: "provisioning", "deprovisioning", "succeeded", "failed", "networkSourceDeleted". :type state: str or ~azure.mgmt.storage.v2019_04_01.models.State @@ -3098,7 +3094,6 @@ class VirtualNetworkRule(msrest.serialization.Model): _validation = { 'virtual_network_resource_id': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -3107,12 +3102,11 @@ class VirtualNetworkRule(msrest.serialization.Model): 'state': {'key': 'state', 'type': 'str'}, } - action = "Allow" - def __init__( self, **kwargs ): super(VirtualNetworkRule, self).__init__(**kwargs) self.virtual_network_resource_id = kwargs['virtual_network_resource_id'] + self.action = kwargs.get('action', None) self.state = kwargs.get('state', None) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/models/_models_py3.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/models/_models_py3.py index 331d44784ac8..e0f440f9de9a 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/models/_models_py3.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/models/_models_py3.py @@ -1199,7 +1199,7 @@ class Identity(msrest.serialization.Model): :vartype principal_id: str :ivar tenant_id: The tenant ID of resource. :vartype tenant_id: str - :ivar type: Required. The identity type. Default value: "SystemAssigned". + :ivar type: The identity type. Has constant value: "SystemAssigned". :vartype type: str """ @@ -1326,20 +1326,18 @@ def __init__( class IPRule(msrest.serialization.Model): """IP rule with specific IP or IP range in CIDR format. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param ip_address_or_range: Required. Specifies the IP or IP range in CIDR format. Only IPV4 address is allowed. :type ip_address_or_range: str - :ivar action: The action of IP ACL rule. Default value: "Allow". - :vartype action: str + :param action: The action of IP ACL rule. The only acceptable values to pass in are None and + "Allow". The default value is None. 
+ :type action: str """ _validation = { 'ip_address_or_range': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -1347,16 +1345,16 @@ class IPRule(msrest.serialization.Model): 'action': {'key': 'action', 'type': 'str'}, } - action = "Allow" - def __init__( self, *, ip_address_or_range: str, + action: Optional[str] = None, **kwargs ): super(IPRule, self).__init__(**kwargs) self.ip_address_or_range = ip_address_or_range + self.action = action class KeyVaultProperties(msrest.serialization.Model): @@ -2716,7 +2714,7 @@ class StorageAccountCheckNameAvailabilityParameters(msrest.serialization.Model): :param name: Required. The storage account name. :type name: str - :ivar type: Required. The type of resource, Microsoft.Storage/storageAccounts. Default value: + :ivar type: The type of resource, Microsoft.Storage/storageAccounts. Has constant value: "Microsoft.Storage/storageAccounts". :vartype type: str """ @@ -3315,15 +3313,14 @@ def __init__( class VirtualNetworkRule(msrest.serialization.Model): """Virtual Network rule. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param virtual_network_resource_id: Required. Resource ID of a subnet, for example: /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}. :type virtual_network_resource_id: str - :ivar action: The action of virtual network rule. Default value: "Allow". - :vartype action: str + :param action: The action of virtual network rule. The only acceptable values to pass in are + None and "Allow". The default value is None. + :type action: str :param state: Gets the state of virtual network rule. Possible values include: "provisioning", "deprovisioning", "succeeded", "failed", "networkSourceDeleted". :type state: str or ~azure.mgmt.storage.v2019_04_01.models.State @@ -3331,7 +3328,6 @@ class VirtualNetworkRule(msrest.serialization.Model): _validation = { 'virtual_network_resource_id': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -3340,15 +3336,15 @@ class VirtualNetworkRule(msrest.serialization.Model): 'state': {'key': 'state', 'type': 'str'}, } - action = "Allow" - def __init__( self, *, virtual_network_resource_id: str, + action: Optional[str] = None, state: Optional[Union[str, "State"]] = None, **kwargs ): super(VirtualNetworkRule, self).__init__(**kwargs) self.virtual_network_resource_id = virtual_network_resource_id + self.action = action self.state = state diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/operations/_storage_accounts_operations.py index 74757f174982..952d60742c41 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/operations/_storage_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/operations/_storage_accounts_operations.py @@ -188,8 +188,8 @@ def begin_create( :type parameters: ~azure.mgmt.storage.v2019_04_01.models.StorageAccountCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: Pass in True if you'd like the ARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either StorageAccount or the result of cls(response) @@ -929,8 +929,8 @@ def begin_failover( :type account_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the ARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/_version.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/_version.py index 8dae91701610..232662316d4d 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/_version.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "18.0.0" +VERSION = "19.0.0" diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_blob_containers_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_blob_containers_operations.py index ae2224ec8fb2..9e80dd5aa03c 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_blob_containers_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_blob_containers_operations.py @@ -48,7 +48,7 @@ def list( maxpagesize: Optional[str] = None, filter: Optional[str] = None, include: Optional[Union[str, "_models.ListContainersInclude"]] = None, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.ListContainerItems"]: """Lists all containers and does not support a prefix like data plane. Also SRP today does not return continuation token. @@ -142,7 +142,7 @@ async def create( account_name: str, container_name: str, blob_container: "_models.BlobContainer", - **kwargs + **kwargs: Any ) -> "_models.BlobContainer": """Creates a new container under the specified account as described by request body. The container resource includes metadata and properties for that container. 
It does not include a list of the @@ -224,7 +224,7 @@ async def update( account_name: str, container_name: str, blob_container: "_models.BlobContainer", - **kwargs + **kwargs: Any ) -> "_models.BlobContainer": """Updates container properties as specified in request body. Properties not mentioned in the request will be unchanged. Update fails if the specified container doesn't already exist. @@ -300,7 +300,7 @@ async def get( resource_group_name: str, account_name: str, container_name: str, - **kwargs + **kwargs: Any ) -> "_models.BlobContainer": """Gets properties of a specified container. @@ -368,7 +368,7 @@ async def delete( resource_group_name: str, account_name: str, container_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes specified container under its account. @@ -432,7 +432,7 @@ async def set_legal_hold( account_name: str, container_name: str, legal_hold: "_models.LegalHold", - **kwargs + **kwargs: Any ) -> "_models.LegalHold": """Sets legal hold tags. Setting the same tag results in an idempotent operation. SetLegalHold follows an append pattern and does not clear out the existing tags that are not specified in @@ -510,7 +510,7 @@ async def clear_legal_hold( account_name: str, container_name: str, legal_hold: "_models.LegalHold", - **kwargs + **kwargs: Any ) -> "_models.LegalHold": """Clears legal hold tags. Clearing the same or non-existent tag results in an idempotent operation. ClearLegalHold clears out only the specified tags in the request. @@ -588,7 +588,7 @@ async def create_or_update_immutability_policy( container_name: str, if_match: Optional[str] = None, parameters: Optional["_models.ImmutabilityPolicy"] = None, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Creates or updates an unlocked immutability policy. ETag in If-Match is honored if given but not required for this operation. @@ -679,7 +679,7 @@ async def get_immutability_policy( account_name: str, container_name: str, if_match: Optional[str] = None, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Gets the existing immutability policy along with the corresponding ETag in response headers and body. @@ -759,7 +759,7 @@ async def delete_immutability_policy( account_name: str, container_name: str, if_match: str, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Aborts an unlocked immutability policy. The response of delete has immutabilityPeriodSinceCreationInDays set to 0. ETag in If-Match is required for this @@ -840,7 +840,7 @@ async def lock_immutability_policy( account_name: str, container_name: str, if_match: str, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Sets the ImmutabilityPolicy to Locked state. The only action allowed on a Locked policy is ExtendImmutabilityPolicy action. ETag in If-Match is required for this operation. @@ -918,7 +918,7 @@ async def extend_immutability_policy( container_name: str, if_match: str, parameters: Optional["_models.ImmutabilityPolicy"] = None, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Extends the immutabilityPeriodSinceCreationInDays of a locked immutabilityPolicy. The only action allowed on a Locked policy will be this action. ETag in If-Match is required for this @@ -1007,7 +1007,7 @@ async def lease( account_name: str, container_name: str, parameters: Optional["_models.LeaseContainerRequest"] = None, - **kwargs + **kwargs: Any ) -> "_models.LeaseContainerResponse": """The Lease Container operation establishes and manages a lock on a container for delete operations. 
The lock duration can be 15 to 60 seconds, or can be infinite. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_blob_inventory_policies_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_blob_inventory_policies_operations.py index 2c722c1abda2..7fb117f12140 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_blob_inventory_policies_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_blob_inventory_policies_operations.py @@ -46,7 +46,7 @@ async def get( resource_group_name: str, account_name: str, blob_inventory_policy_name: Union[str, "_models.BlobInventoryPolicyName"], - **kwargs + **kwargs: Any ) -> "_models.BlobInventoryPolicy": """Gets the blob inventory policy associated with the specified storage account. @@ -114,7 +114,7 @@ async def create_or_update( account_name: str, blob_inventory_policy_name: Union[str, "_models.BlobInventoryPolicyName"], properties: "_models.BlobInventoryPolicy", - **kwargs + **kwargs: Any ) -> "_models.BlobInventoryPolicy": """Sets the blob inventory policy to the specified storage account. @@ -188,7 +188,7 @@ async def delete( resource_group_name: str, account_name: str, blob_inventory_policy_name: Union[str, "_models.BlobInventoryPolicyName"], - **kwargs + **kwargs: Any ) -> None: """Deletes the blob inventory policy associated with the specified storage account. @@ -251,7 +251,7 @@ def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.ListBlobInventoryPolicy"]: """Gets the blob inventory policy associated with the specified storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_blob_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_blob_services_operations.py index b58d04a492ba..39e0cc4125b0 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_blob_services_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_blob_services_operations.py @@ -45,7 +45,7 @@ def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.BlobServiceItems"]: """List blob services of storage account. It returns a collection of one object named default. @@ -123,7 +123,7 @@ async def set_service_properties( resource_group_name: str, account_name: str, parameters: "_models.BlobServiceProperties", - **kwargs + **kwargs: Any ) -> "_models.BlobServiceProperties": """Sets the properties of a storage account’s Blob service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. @@ -195,7 +195,7 @@ async def get_service_properties( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.BlobServiceProperties": """Gets the properties of a storage account’s Blob service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. 
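The change repeated throughout the async operation hunks above (and in the rest of this diff) is purely a typing one: bare `**kwargs` gains an explicit `Any` annotation. A minimal, hypothetical sketch of the pattern outside the generated client, assuming nothing beyond the standard library:

```py
import asyncio
from typing import Any


async def get_service_properties(resource_group_name: str, account_name: str, **kwargs: Any) -> None:
    # ``**kwargs: Any`` annotates the values of the extra keyword arguments.
    # Runtime behaviour is identical to bare ``**kwargs``; the benefit is that
    # static checkers such as mypy no longer treat the signature as untyped.
    cls = kwargs.pop("cls", None)  # the generated operations pop options the same way
    print(resource_group_name, account_name, cls)


if __name__ == "__main__":
    asyncio.run(get_service_properties("my-resource-group", "mystorageacct123"))
```

Because the edit is mechanical, it is not called out again for the remaining operations modules below.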
diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_encryption_scopes_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_encryption_scopes_operations.py index 4e93adce98bc..419100afefae 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_encryption_scopes_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_encryption_scopes_operations.py @@ -47,7 +47,7 @@ async def put( account_name: str, encryption_scope_name: str, encryption_scope: "_models.EncryptionScope", - **kwargs + **kwargs: Any ) -> "_models.EncryptionScope": """Synchronously creates or updates an encryption scope under the specified storage account. If an encryption scope is already created and a subsequent request is issued with different @@ -130,7 +130,7 @@ async def patch( account_name: str, encryption_scope_name: str, encryption_scope: "_models.EncryptionScope", - **kwargs + **kwargs: Any ) -> "_models.EncryptionScope": """Update encryption scope properties as specified in the request body. Update fails if the specified encryption scope does not already exist. @@ -207,7 +207,7 @@ async def get( resource_group_name: str, account_name: str, encryption_scope_name: str, - **kwargs + **kwargs: Any ) -> "_models.EncryptionScope": """Returns the properties for the specified encryption scope. @@ -275,7 +275,7 @@ def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.EncryptionScopeListResult"]: """Lists all the encryption scopes available under the specified storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_file_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_file_services_operations.py index fcccd2fb523c..4c99ace8e42b 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_file_services_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_file_services_operations.py @@ -44,7 +44,7 @@ async def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.FileServiceItems": """List all file services in storage accounts. @@ -106,7 +106,7 @@ async def set_service_properties( resource_group_name: str, account_name: str, parameters: "_models.FileServiceProperties", - **kwargs + **kwargs: Any ) -> "_models.FileServiceProperties": """Sets the properties of file services in storage accounts, including CORS (Cross-Origin Resource Sharing) rules. @@ -178,7 +178,7 @@ async def get_service_properties( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.FileServiceProperties": """Gets the properties of file services in storage accounts, including CORS (Cross-Origin Resource Sharing) rules. 
diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_file_shares_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_file_shares_operations.py index da14d994bd31..09a2512ba8db 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_file_shares_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_file_shares_operations.py @@ -48,7 +48,7 @@ def list( maxpagesize: Optional[str] = None, filter: Optional[str] = None, expand: Optional[str] = "deleted", - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.FileShareItems"]: """Lists all shares. @@ -141,7 +141,7 @@ async def create( account_name: str, share_name: str, file_share: "_models.FileShare", - **kwargs + **kwargs: Any ) -> "_models.FileShare": """Creates a new share under the specified account as described by request body. The share resource includes metadata and properties for that share. It does not include a list of the @@ -223,7 +223,7 @@ async def update( account_name: str, share_name: str, file_share: "_models.FileShare", - **kwargs + **kwargs: Any ) -> "_models.FileShare": """Updates share properties as specified in request body. Properties not mentioned in the request will not be changed. Update fails if the specified share does not already exist. @@ -300,7 +300,7 @@ async def get( account_name: str, share_name: str, expand: Optional[str] = "stats", - **kwargs + **kwargs: Any ) -> "_models.FileShare": """Gets properties of a specified share. @@ -372,7 +372,7 @@ async def delete( resource_group_name: str, account_name: str, share_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes specified share under its account. @@ -438,7 +438,7 @@ async def restore( account_name: str, share_name: str, deleted_share: "_models.DeletedShare", - **kwargs + **kwargs: Any ) -> None: """Restore a file share within a valid retention days if share soft delete is enabled. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_management_policies_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_management_policies_operations.py index 76b93db0d9ca..29066d82165c 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_management_policies_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_management_policies_operations.py @@ -45,7 +45,7 @@ async def get( resource_group_name: str, account_name: str, management_policy_name: Union[str, "_models.ManagementPolicyName"], - **kwargs + **kwargs: Any ) -> "_models.ManagementPolicy": """Gets the managementpolicy associated with the specified storage account. @@ -112,7 +112,7 @@ async def create_or_update( account_name: str, management_policy_name: Union[str, "_models.ManagementPolicyName"], properties: "_models.ManagementPolicy", - **kwargs + **kwargs: Any ) -> "_models.ManagementPolicy": """Sets the managementpolicy to the specified storage account. @@ -185,7 +185,7 @@ async def delete( resource_group_name: str, account_name: str, management_policy_name: Union[str, "_models.ManagementPolicyName"], - **kwargs + **kwargs: Any ) -> None: """Deletes the managementpolicy associated with the specified storage account. 
diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_object_replication_policies_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_object_replication_policies_operations.py index 904076f962a9..f0293809a7f1 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_object_replication_policies_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_object_replication_policies_operations.py @@ -45,7 +45,7 @@ def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.ObjectReplicationPolicies"]: """List the object replication policies associated with the storage account. @@ -124,7 +124,7 @@ async def get( resource_group_name: str, account_name: str, object_replication_policy_id: str, - **kwargs + **kwargs: Any ) -> "_models.ObjectReplicationPolicy": """Get the object replication policy of the storage account by policy ID. @@ -192,7 +192,7 @@ async def create_or_update( account_name: str, object_replication_policy_id: str, properties: "_models.ObjectReplicationPolicy", - **kwargs + **kwargs: Any ) -> "_models.ObjectReplicationPolicy": """Create or update the object replication policy of the storage account. @@ -267,7 +267,7 @@ async def delete( resource_group_name: str, account_name: str, object_replication_policy_id: str, - **kwargs + **kwargs: Any ) -> None: """Deletes the object replication policy associated with the specified storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_operations.py index 415785e7ec76..299e369b90b8 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.OperationListResult"]: """Lists all of the available Storage Rest API operations. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_private_endpoint_connections_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_private_endpoint_connections_operations.py index f28b04b22eba..7d02829dc493 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_private_endpoint_connections_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_private_endpoint_connections_operations.py @@ -45,7 +45,7 @@ def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.PrivateEndpointConnectionListResult"]: """List all the private endpoint connections associated with the storage account. @@ -123,7 +123,7 @@ async def get( resource_group_name: str, account_name: str, private_endpoint_connection_name: str, - **kwargs + **kwargs: Any ) -> "_models.PrivateEndpointConnection": """Gets the specified private endpoint connection associated with the storage account. 
@@ -191,7 +191,7 @@ async def put( account_name: str, private_endpoint_connection_name: str, properties: "_models.PrivateEndpointConnection", - **kwargs + **kwargs: Any ) -> "_models.PrivateEndpointConnection": """Update the state of specified private endpoint connection associated with the storage account. @@ -265,7 +265,7 @@ async def delete( resource_group_name: str, account_name: str, private_endpoint_connection_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes the specified private endpoint connection associated with the storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_private_link_resources_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_private_link_resources_operations.py index 849c64c0421c..456ab602b2fe 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_private_link_resources_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_private_link_resources_operations.py @@ -44,7 +44,7 @@ async def list_by_storage_account( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.PrivateLinkResourceListResult": """Gets the private link resources that need to be created for a storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_queue_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_queue_operations.py index c79d3c4eb5fc..f69485f03047 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_queue_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_queue_operations.py @@ -47,7 +47,7 @@ async def create( account_name: str, queue_name: str, queue: "_models.StorageQueue", - **kwargs + **kwargs: Any ) -> "_models.StorageQueue": """Creates a new queue with the specified queue name, under the specified account. @@ -123,7 +123,7 @@ async def update( account_name: str, queue_name: str, queue: "_models.StorageQueue", - **kwargs + **kwargs: Any ) -> "_models.StorageQueue": """Creates a new queue with the specified queue name, under the specified account. @@ -198,7 +198,7 @@ async def get( resource_group_name: str, account_name: str, queue_name: str, - **kwargs + **kwargs: Any ) -> "_models.StorageQueue": """Gets the queue with the specified queue name, under the specified account if it exists. @@ -266,7 +266,7 @@ async def delete( resource_group_name: str, account_name: str, queue_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes the queue with the specified queue name, under the specified account if it exists. @@ -332,7 +332,7 @@ def list( account_name: str, maxpagesize: Optional[str] = None, filter: Optional[str] = None, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.ListQueueResource"]: """Gets a list of all the queues under the specified storage account. 
diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_queue_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_queue_services_operations.py index dc0b41e212f9..614648ded66c 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_queue_services_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_queue_services_operations.py @@ -44,7 +44,7 @@ async def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.ListQueueServices": """List all queue services for the storage account. @@ -106,7 +106,7 @@ async def set_service_properties( resource_group_name: str, account_name: str, parameters: "_models.QueueServiceProperties", - **kwargs + **kwargs: Any ) -> "_models.QueueServiceProperties": """Sets the properties of a storage account’s Queue service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. @@ -178,7 +178,7 @@ async def get_service_properties( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.QueueServiceProperties": """Gets the properties of a storage account’s Queue service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_skus_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_skus_operations.py index bc035d05d4e0..b83745e95012 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_skus_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_skus_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageSkuListResult"]: """Lists the available SKUs supported by Microsoft.Storage for given subscription. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_storage_accounts_operations.py index 80ca6860d3d4..a4f8a87c0c7b 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_storage_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_storage_accounts_operations.py @@ -46,7 +46,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: async def check_name_availability( self, account_name: "_models.StorageAccountCheckNameAvailabilityParameters", - **kwargs + **kwargs: Any ) -> "_models.CheckNameAvailabilityResult": """Checks that the storage account name is valid and is not already in use. 
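The `check_name_availability` hunk above takes a `StorageAccountCheckNameAvailabilityParameters` instance whose `type` field, per the model docstring change earlier in this diff, is a constant (`"Microsoft.Storage/storageAccounts"`) and is not passed by the caller. A hedged sketch of calling the async operation; the multi-API `StorageManagementClient`, `DefaultAzureCredential`, and the placeholder subscription ID are assumptions, not part of this PR:

```py
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.storage.aio import StorageManagementClient
from azure.mgmt.storage.v2019_06_01.models import (
    StorageAccountCheckNameAvailabilityParameters,
)


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with StorageManagementClient(credential, "<subscription-id>") as client:
            # Only ``name`` is supplied; ``type`` is the constant described above.
            params = StorageAccountCheckNameAvailabilityParameters(name="mystorageacct123")
            result = await client.storage_accounts.check_name_availability(params)
            print(result.name_available, result.reason, result.message)


if __name__ == "__main__":
    asyncio.run(main())
```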
@@ -108,7 +108,7 @@ async def _create_initial( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountCreateParameters", - **kwargs + **kwargs: Any ) -> Optional["_models.StorageAccount"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.StorageAccount"]] error_map = { @@ -163,7 +163,7 @@ async def begin_create( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountCreateParameters", - **kwargs + **kwargs: Any ) -> AsyncLROPoller["_models.StorageAccount"]: """Asynchronously creates a new storage account with the specified parameters. If an account is already created and a subsequent create request is issued with different properties, the @@ -181,8 +181,8 @@ async def begin_create( :type parameters: ~azure.mgmt.storage.v2019_06_01.models.StorageAccountCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either StorageAccount or the result of cls(response) @@ -239,7 +239,7 @@ async def delete( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes a storage account in Microsoft Azure. @@ -296,7 +296,7 @@ async def get_properties( resource_group_name: str, account_name: str, expand: Optional[Union[str, "_models.StorageAccountExpand"]] = None, - **kwargs + **kwargs: Any ) -> "_models.StorageAccount": """Returns the properties for the specified storage account including but not limited to name, SKU name, location, and account status. The ListKeys operation should be used to retrieve storage @@ -366,7 +366,7 @@ async def update( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountUpdateParameters", - **kwargs + **kwargs: Any ) -> "_models.StorageAccount": """The update operation can be used to update the SKU, encryption, access tier, or tags for a storage account. It can also be used to map the account to a custom domain. Only one custom @@ -439,7 +439,7 @@ async def update( def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageAccountListResult"]: """Lists all the storage accounts available under the subscription. Note that storage keys are not returned; use the ListKeys operation for this. @@ -507,7 +507,7 @@ async def get_next(next_link=None): def list_by_resource_group( self, resource_group_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageAccountListResult"]: """Lists all the storage accounts available under the given resource group. Note that storage keys are not returned; use the ListKeys operation for this. 
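The reworded `polling` keyword above applies to every `begin_*` long-running operation in this diff: by default `AsyncARMPolling` (or `ARMPolling` in the sync clients) drives the poller, `polling=False` disables polling, and an initialized polling object can be supplied instead. A hedged sketch of the default path for the async `begin_create` shown above; the helper function and its placeholder arguments are illustrative, not part of this PR:

```py
from azure.mgmt.storage.aio import StorageManagementClient
from azure.mgmt.storage.v2019_06_01.models import (
    Kind,
    Sku,
    StorageAccountCreateParameters,
)


async def create_account(client: StorageManagementClient) -> None:
    # With no ``polling`` argument, AsyncARMPolling polls the LRO until it completes.
    poller = await client.storage_accounts.begin_create(
        "my-resource-group",
        "mystorageacct123",
        StorageAccountCreateParameters(
            sku=Sku(name="Standard_LRS"),
            kind=Kind.STORAGE_V2,
            location="westus",
        ),
    )
    account = await poller.result()  # waits for provisioning to finish
    print(account.provisioning_state)
```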
@@ -581,7 +581,7 @@ async def list_keys( resource_group_name: str, account_name: str, expand: Optional[str] = "kerb", - **kwargs + **kwargs: Any ) -> "_models.StorageAccountListKeysResult": """Lists the access keys or Kerberos keys (if active directory enabled) for the specified storage account. @@ -648,7 +648,7 @@ async def regenerate_key( resource_group_name: str, account_name: str, regenerate_key: "_models.StorageAccountRegenerateKeyParameters", - **kwargs + **kwargs: Any ) -> "_models.StorageAccountListKeysResult": """Regenerates one of the access keys or Kerberos keys for the specified storage account. @@ -718,7 +718,7 @@ async def list_account_sas( resource_group_name: str, account_name: str, parameters: "_models.AccountSasParameters", - **kwargs + **kwargs: Any ) -> "_models.ListAccountSasResponse": """List SAS credentials of a storage account. @@ -787,7 +787,7 @@ async def list_service_sas( resource_group_name: str, account_name: str, parameters: "_models.ServiceSasParameters", - **kwargs + **kwargs: Any ) -> "_models.ListServiceSasResponse": """List service SAS credentials of a specific resource. @@ -855,7 +855,7 @@ async def _failover_initial( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { @@ -897,7 +897,7 @@ async def begin_failover( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncLROPoller[None]: """Failover request can be triggered for a storage account in case of availability issues. The failover occurs from the storage account's primary cluster to secondary cluster for RA-GRS @@ -912,8 +912,8 @@ async def begin_failover( :type account_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) @@ -967,7 +967,7 @@ async def _restore_blob_ranges_initial( resource_group_name: str, account_name: str, parameters: "_models.BlobRestoreParameters", - **kwargs + **kwargs: Any ) -> "_models.BlobRestoreStatus": cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobRestoreStatus"] error_map = { @@ -1024,7 +1024,7 @@ async def begin_restore_blob_ranges( resource_group_name: str, account_name: str, parameters: "_models.BlobRestoreParameters", - **kwargs + **kwargs: Any ) -> AsyncLROPoller["_models.BlobRestoreStatus"]: """Restore blobs in the specified blob ranges. @@ -1039,8 +1039,8 @@ async def begin_restore_blob_ranges( :type parameters: ~azure.mgmt.storage.v2019_06_01.models.BlobRestoreParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either BlobRestoreStatus or the result of cls(response) @@ -1097,7 +1097,7 @@ async def revoke_user_delegation_keys( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> None: """Revoke user delegation keys. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_table_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_table_operations.py index f6cbfc8a6bd3..ca6d307d82d8 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_table_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_table_operations.py @@ -46,7 +46,7 @@ async def create( resource_group_name: str, account_name: str, table_name: str, - **kwargs + **kwargs: Any ) -> "_models.Table": """Creates a new table with the specified table name, under the specified account. @@ -113,7 +113,7 @@ async def update( resource_group_name: str, account_name: str, table_name: str, - **kwargs + **kwargs: Any ) -> "_models.Table": """Creates a new table with the specified table name, under the specified account. @@ -180,7 +180,7 @@ async def get( resource_group_name: str, account_name: str, table_name: str, - **kwargs + **kwargs: Any ) -> "_models.Table": """Gets the table with the specified table name, under the specified account if it exists. @@ -247,7 +247,7 @@ async def delete( resource_group_name: str, account_name: str, table_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes the table with the specified table name, under the specified account if it exists. @@ -310,7 +310,7 @@ def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.ListTableResource"]: """Gets a list of all the tables under the specified storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_table_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_table_services_operations.py index f5128f278a7d..e73a0904d485 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_table_services_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_table_services_operations.py @@ -44,7 +44,7 @@ async def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.ListTableServices": """List all table services for the storage account. @@ -106,7 +106,7 @@ async def set_service_properties( resource_group_name: str, account_name: str, parameters: "_models.TableServiceProperties", - **kwargs + **kwargs: Any ) -> "_models.TableServiceProperties": """Sets the properties of a storage account’s Table service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. 
@@ -178,7 +178,7 @@ async def get_service_properties( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.TableServiceProperties": """Gets the properties of a storage account’s Table service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_usages_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_usages_operations.py index 160c428f7228..5241dd42f118 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_usages_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_usages_operations.py @@ -44,7 +44,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list_by_location( self, location: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.UsageListResult"]: """Gets the current usage count and the limit for the resources of the location under the subscription. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/models/_models.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/models/_models.py index 1bccaf33474c..ac6432e96daf 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/models/_models.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/models/_models.py @@ -1803,7 +1803,7 @@ class Identity(msrest.serialization.Model): :vartype principal_id: str :ivar tenant_id: The tenant ID of resource. :vartype tenant_id: str - :ivar type: Required. The identity type. Default value: "SystemAssigned". + :ivar type: The identity type. Has constant value: "SystemAssigned". :vartype type: str """ @@ -1939,20 +1939,18 @@ def __init__( class IPRule(msrest.serialization.Model): """IP rule with specific IP or IP range in CIDR format. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param ip_address_or_range: Required. Specifies the IP or IP range in CIDR format. Only IPV4 address is allowed. :type ip_address_or_range: str - :ivar action: The action of IP ACL rule. Default value: "Allow". - :vartype action: str + :param action: The action of IP ACL rule. The only acceptable values to pass in are None and + "Allow". The default value is None. + :type action: str """ _validation = { 'ip_address_or_range': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -1960,14 +1958,13 @@ class IPRule(msrest.serialization.Model): 'action': {'key': 'action', 'type': 'str'}, } - action = "Allow" - def __init__( self, **kwargs ): super(IPRule, self).__init__(**kwargs) self.ip_address_or_range = kwargs['ip_address_or_range'] + self.action = kwargs.get('action', None) class KeyVaultProperties(msrest.serialization.Model): @@ -4028,7 +4025,7 @@ class StorageAccountCheckNameAvailabilityParameters(msrest.serialization.Model): :param name: Required. The storage account name. :type name: str - :ivar type: Required. The type of resource, Microsoft.Storage/storageAccounts. Default value: + :ivar type: The type of resource, Microsoft.Storage/storageAccounts. Has constant value: "Microsoft.Storage/storageAccounts". :vartype type: str """ @@ -4891,15 +4888,14 @@ def __init__( class VirtualNetworkRule(msrest.serialization.Model): """Virtual Network rule. 
- Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param virtual_network_resource_id: Required. Resource ID of a subnet, for example: /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}. :type virtual_network_resource_id: str - :ivar action: The action of virtual network rule. Default value: "Allow". - :vartype action: str + :param action: The action of virtual network rule. The only acceptable values to pass in are + None and "Allow". The default value is None. + :type action: str :param state: Gets the state of virtual network rule. Possible values include: "provisioning", "deprovisioning", "succeeded", "failed", "networkSourceDeleted". :type state: str or ~azure.mgmt.storage.v2019_06_01.models.State @@ -4907,7 +4903,6 @@ class VirtualNetworkRule(msrest.serialization.Model): _validation = { 'virtual_network_resource_id': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -4916,12 +4911,11 @@ class VirtualNetworkRule(msrest.serialization.Model): 'state': {'key': 'state', 'type': 'str'}, } - action = "Allow" - def __init__( self, **kwargs ): super(VirtualNetworkRule, self).__init__(**kwargs) self.virtual_network_resource_id = kwargs['virtual_network_resource_id'] + self.action = kwargs.get('action', None) self.state = kwargs.get('state', None) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/models/_models_py3.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/models/_models_py3.py index 64f10653798a..8511407eeb5f 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/models/_models_py3.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/models/_models_py3.py @@ -1941,7 +1941,7 @@ class Identity(msrest.serialization.Model): :vartype principal_id: str :ivar tenant_id: The tenant ID of resource. :vartype tenant_id: str - :ivar type: Required. The identity type. Default value: "SystemAssigned". + :ivar type: The identity type. Has constant value: "SystemAssigned". :vartype type: str """ @@ -2083,20 +2083,18 @@ def __init__( class IPRule(msrest.serialization.Model): """IP rule with specific IP or IP range in CIDR format. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param ip_address_or_range: Required. Specifies the IP or IP range in CIDR format. Only IPV4 address is allowed. :type ip_address_or_range: str - :ivar action: The action of IP ACL rule. Default value: "Allow". - :vartype action: str + :param action: The action of IP ACL rule. The only acceptable values to pass in are None and + "Allow". The default value is None. + :type action: str """ _validation = { 'ip_address_or_range': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -2104,16 +2102,16 @@ class IPRule(msrest.serialization.Model): 'action': {'key': 'action', 'type': 'str'}, } - action = "Allow" - def __init__( self, *, ip_address_or_range: str, + action: Optional[str] = None, **kwargs ): super(IPRule, self).__init__(**kwargs) self.ip_address_or_range = ip_address_or_range + self.action = action class KeyVaultProperties(msrest.serialization.Model): @@ -4342,7 +4340,7 @@ class StorageAccountCheckNameAvailabilityParameters(msrest.serialization.Model): :param name: Required. 
The storage account name. :type name: str - :ivar type: Required. The type of resource, Microsoft.Storage/storageAccounts. Default value: + :ivar type: The type of resource, Microsoft.Storage/storageAccounts. Has constant value: "Microsoft.Storage/storageAccounts". :vartype type: str """ @@ -5260,15 +5258,14 @@ def __init__( class VirtualNetworkRule(msrest.serialization.Model): """Virtual Network rule. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param virtual_network_resource_id: Required. Resource ID of a subnet, for example: /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}. :type virtual_network_resource_id: str - :ivar action: The action of virtual network rule. Default value: "Allow". - :vartype action: str + :param action: The action of virtual network rule. The only acceptable values to pass in are + None and "Allow". The default value is None. + :type action: str :param state: Gets the state of virtual network rule. Possible values include: "provisioning", "deprovisioning", "succeeded", "failed", "networkSourceDeleted". :type state: str or ~azure.mgmt.storage.v2019_06_01.models.State @@ -5276,7 +5273,6 @@ class VirtualNetworkRule(msrest.serialization.Model): _validation = { 'virtual_network_resource_id': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -5285,15 +5281,15 @@ class VirtualNetworkRule(msrest.serialization.Model): 'state': {'key': 'state', 'type': 'str'}, } - action = "Allow" - def __init__( self, *, virtual_network_resource_id: str, + action: Optional[str] = None, state: Optional[Union[str, "State"]] = None, **kwargs ): super(VirtualNetworkRule, self).__init__(**kwargs) self.virtual_network_resource_id = virtual_network_resource_id + self.action = action self.state = state diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/operations/_storage_accounts_operations.py index ad86d4f2fec2..543c0d4ae407 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/operations/_storage_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/operations/_storage_accounts_operations.py @@ -188,8 +188,8 @@ def begin_create( :type parameters: ~azure.mgmt.storage.v2019_06_01.models.StorageAccountCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the ARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either StorageAccount or the result of cls(response) @@ -930,8 +930,8 @@ def begin_failover( :type account_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the ARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) @@ -1059,8 +1059,8 @@ def begin_restore_blob_ranges( :type parameters: ~azure.mgmt.storage.v2019_06_01.models.BlobRestoreParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the ARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either BlobRestoreStatus or the result of cls(response) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/_version.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/_version.py index 8dae91701610..232662316d4d 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/_version.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "18.0.0" +VERSION = "19.0.0" diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_blob_containers_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_blob_containers_operations.py index 1f0713a5497d..3fbfd652f9f8 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_blob_containers_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_blob_containers_operations.py @@ -48,7 +48,7 @@ def list( maxpagesize: Optional[str] = None, filter: Optional[str] = None, include: Optional[Union[str, "_models.ListContainersInclude"]] = None, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.ListContainerItems"]: """Lists all containers and does not support a prefix like data plane. Also SRP today does not return continuation token. 
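The `IPRule` and `VirtualNetworkRule` hunks a little earlier in this diff (in both `_models.py` and `_models_py3.py`) drop the constant class attribute `action = "Allow"` and accept `action` as an optional constructor keyword instead. A minimal sketch using only the constructors shown in those hunks; the resource ID below is a placeholder:

```py
from azure.mgmt.storage.v2019_06_01.models import IPRule, VirtualNetworkRule

# ``action`` now defaults to None; the only other accepted value is "Allow".
ip_rule = IPRule(ip_address_or_range="23.45.1.0/24", action="Allow")
default_ip_rule = IPRule(ip_address_or_range="198.51.100.0/24")  # action stays None

vnet_rule = VirtualNetworkRule(
    virtual_network_resource_id=(
        "/subscriptions/<subscription-id>/resourceGroups/<group>/providers/"
        "Microsoft.Network/virtualNetworks/<vnet>/subnets/<subnet>"
    ),
    action="Allow",
)

print(ip_rule.action, default_ip_rule.action, vnet_rule.action)
```

Previously these models always serialized `action` as `"Allow"`; with this change the field is omitted from the request unless the caller sets it.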
@@ -142,7 +142,7 @@ async def create( account_name: str, container_name: str, blob_container: "_models.BlobContainer", - **kwargs + **kwargs: Any ) -> "_models.BlobContainer": """Creates a new container under the specified account as described by request body. The container resource includes metadata and properties for that container. It does not include a list of the @@ -224,7 +224,7 @@ async def update( account_name: str, container_name: str, blob_container: "_models.BlobContainer", - **kwargs + **kwargs: Any ) -> "_models.BlobContainer": """Updates container properties as specified in request body. Properties not mentioned in the request will be unchanged. Update fails if the specified container doesn't already exist. @@ -300,7 +300,7 @@ async def get( resource_group_name: str, account_name: str, container_name: str, - **kwargs + **kwargs: Any ) -> "_models.BlobContainer": """Gets properties of a specified container. @@ -368,7 +368,7 @@ async def delete( resource_group_name: str, account_name: str, container_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes specified container under its account. @@ -432,7 +432,7 @@ async def set_legal_hold( account_name: str, container_name: str, legal_hold: "_models.LegalHold", - **kwargs + **kwargs: Any ) -> "_models.LegalHold": """Sets legal hold tags. Setting the same tag results in an idempotent operation. SetLegalHold follows an append pattern and does not clear out the existing tags that are not specified in @@ -510,7 +510,7 @@ async def clear_legal_hold( account_name: str, container_name: str, legal_hold: "_models.LegalHold", - **kwargs + **kwargs: Any ) -> "_models.LegalHold": """Clears legal hold tags. Clearing the same or non-existent tag results in an idempotent operation. ClearLegalHold clears out only the specified tags in the request. @@ -588,7 +588,7 @@ async def create_or_update_immutability_policy( container_name: str, if_match: Optional[str] = None, parameters: Optional["_models.ImmutabilityPolicy"] = None, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Creates or updates an unlocked immutability policy. ETag in If-Match is honored if given but not required for this operation. @@ -679,7 +679,7 @@ async def get_immutability_policy( account_name: str, container_name: str, if_match: Optional[str] = None, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Gets the existing immutability policy along with the corresponding ETag in response headers and body. @@ -759,7 +759,7 @@ async def delete_immutability_policy( account_name: str, container_name: str, if_match: str, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Aborts an unlocked immutability policy. The response of delete has immutabilityPeriodSinceCreationInDays set to 0. ETag in If-Match is required for this @@ -840,7 +840,7 @@ async def lock_immutability_policy( account_name: str, container_name: str, if_match: str, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Sets the ImmutabilityPolicy to Locked state. The only action allowed on a Locked policy is ExtendImmutabilityPolicy action. ETag in If-Match is required for this operation. @@ -918,7 +918,7 @@ async def extend_immutability_policy( container_name: str, if_match: str, parameters: Optional["_models.ImmutabilityPolicy"] = None, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Extends the immutabilityPeriodSinceCreationInDays of a locked immutabilityPolicy. The only action allowed on a Locked policy will be this action. 
ETag in If-Match is required for this @@ -1007,7 +1007,7 @@ async def lease( account_name: str, container_name: str, parameters: Optional["_models.LeaseContainerRequest"] = None, - **kwargs + **kwargs: Any ) -> "_models.LeaseContainerResponse": """The Lease Container operation establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_blob_inventory_policies_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_blob_inventory_policies_operations.py index 266304790c4b..cb52b18812dc 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_blob_inventory_policies_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_blob_inventory_policies_operations.py @@ -46,7 +46,7 @@ async def get( resource_group_name: str, account_name: str, blob_inventory_policy_name: Union[str, "_models.BlobInventoryPolicyName"], - **kwargs + **kwargs: Any ) -> "_models.BlobInventoryPolicy": """Gets the blob inventory policy associated with the specified storage account. @@ -114,7 +114,7 @@ async def create_or_update( account_name: str, blob_inventory_policy_name: Union[str, "_models.BlobInventoryPolicyName"], properties: "_models.BlobInventoryPolicy", - **kwargs + **kwargs: Any ) -> "_models.BlobInventoryPolicy": """Sets the blob inventory policy to the specified storage account. @@ -188,7 +188,7 @@ async def delete( resource_group_name: str, account_name: str, blob_inventory_policy_name: Union[str, "_models.BlobInventoryPolicyName"], - **kwargs + **kwargs: Any ) -> None: """Deletes the blob inventory policy associated with the specified storage account. @@ -251,7 +251,7 @@ def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.ListBlobInventoryPolicy"]: """Gets the blob inventory policy associated with the specified storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_blob_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_blob_services_operations.py index 907a41085167..a8b50842fc17 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_blob_services_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_blob_services_operations.py @@ -45,7 +45,7 @@ def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.BlobServiceItems"]: """List blob services of storage account. It returns a collection of one object named default. @@ -123,7 +123,7 @@ async def set_service_properties( resource_group_name: str, account_name: str, parameters: "_models.BlobServiceProperties", - **kwargs + **kwargs: Any ) -> "_models.BlobServiceProperties": """Sets the properties of a storage account’s Blob service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. 
@@ -195,7 +195,7 @@ async def get_service_properties( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.BlobServiceProperties": """Gets the properties of a storage account’s Blob service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_deleted_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_deleted_accounts_operations.py index 10783789f4e7..9ee4432d1747 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_deleted_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_deleted_accounts_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.DeletedAccountListResult"]: """Lists deleted accounts under the subscription. @@ -112,7 +112,7 @@ async def get( self, deleted_account_name: str, location: str, - **kwargs + **kwargs: Any ) -> "_models.DeletedAccount": """Get properties of specified deleted account resource. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_encryption_scopes_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_encryption_scopes_operations.py index 13f7d4f1ad84..4e3ec3c11bec 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_encryption_scopes_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_encryption_scopes_operations.py @@ -47,7 +47,7 @@ async def put( account_name: str, encryption_scope_name: str, encryption_scope: "_models.EncryptionScope", - **kwargs + **kwargs: Any ) -> "_models.EncryptionScope": """Synchronously creates or updates an encryption scope under the specified storage account. If an encryption scope is already created and a subsequent request is issued with different @@ -130,7 +130,7 @@ async def patch( account_name: str, encryption_scope_name: str, encryption_scope: "_models.EncryptionScope", - **kwargs + **kwargs: Any ) -> "_models.EncryptionScope": """Update encryption scope properties as specified in the request body. Update fails if the specified encryption scope does not already exist. @@ -207,7 +207,7 @@ async def get( resource_group_name: str, account_name: str, encryption_scope_name: str, - **kwargs + **kwargs: Any ) -> "_models.EncryptionScope": """Returns the properties for the specified encryption scope. @@ -275,7 +275,7 @@ def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.EncryptionScopeListResult"]: """Lists all the encryption scopes available under the specified storage account. 
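Each `list` operation above returns an async pageable rather than a plain list. A hedged sketch of consuming the encryption-scope pager; the operation-group attribute name and the helper function are assumptions for illustration, not part of this PR:

```py
from azure.mgmt.storage.aio import StorageManagementClient


async def print_encryption_scopes(
    client: StorageManagementClient, resource_group_name: str, account_name: str
) -> None:
    # ``async for`` walks the pageable; additional pages are fetched on demand.
    async for scope in client.encryption_scopes.list(resource_group_name, account_name):
        print(scope.name, scope.state)
```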
diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_file_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_file_services_operations.py index fb02b98da900..58e8391bae1d 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_file_services_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_file_services_operations.py @@ -44,7 +44,7 @@ async def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.FileServiceItems": """List all file services in storage accounts. @@ -106,7 +106,7 @@ async def set_service_properties( resource_group_name: str, account_name: str, parameters: "_models.FileServiceProperties", - **kwargs + **kwargs: Any ) -> "_models.FileServiceProperties": """Sets the properties of file services in storage accounts, including CORS (Cross-Origin Resource Sharing) rules. @@ -178,7 +178,7 @@ async def get_service_properties( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.FileServiceProperties": """Gets the properties of file services in storage accounts, including CORS (Cross-Origin Resource Sharing) rules. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_file_shares_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_file_shares_operations.py index 04b379085566..e1184cdb8c12 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_file_shares_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_file_shares_operations.py @@ -48,7 +48,7 @@ def list( maxpagesize: Optional[str] = None, filter: Optional[str] = None, expand: Optional[Union[str, "_models.ListSharesExpand"]] = None, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.FileShareItems"]: """Lists all shares. @@ -142,7 +142,7 @@ async def create( share_name: str, file_share: "_models.FileShare", expand: Optional[str] = "snapshots", - **kwargs + **kwargs: Any ) -> "_models.FileShare": """Creates a new share under the specified account as described by request body. The share resource includes metadata and properties for that share. It does not include a list of the @@ -228,7 +228,7 @@ async def update( account_name: str, share_name: str, file_share: "_models.FileShare", - **kwargs + **kwargs: Any ) -> "_models.FileShare": """Updates share properties as specified in request body. Properties not mentioned in the request will not be changed. Update fails if the specified share does not already exist. @@ -306,7 +306,7 @@ async def get( share_name: str, expand: Optional[str] = "stats", x_ms_snapshot: Optional[str] = None, - **kwargs + **kwargs: Any ) -> "_models.FileShare": """Gets properties of a specified share. @@ -383,7 +383,7 @@ async def delete( account_name: str, share_name: str, x_ms_snapshot: Optional[str] = None, - **kwargs + **kwargs: Any ) -> None: """Deletes specified share under its account. @@ -453,7 +453,7 @@ async def restore( account_name: str, share_name: str, deleted_share: "_models.DeletedShare", - **kwargs + **kwargs: Any ) -> None: """Restore a file share within a valid retention days if share soft delete is enabled. 
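The 2020-08-01-preview file share hunks above add snapshot-aware parameters: `expand="snapshots"` on `create`, and `x_ms_snapshot` on `get` and `delete`. A rough sketch of reading a share with the `expand="stats"` default visible in the `get` signature; it assumes a client pinned to this preview API version, and the attribute names printed below are not part of this diff:

```py
from azure.mgmt.storage.aio import StorageManagementClient


async def show_share(
    client: StorageManagementClient, resource_group_name: str, account_name: str, share_name: str
) -> None:
    # ``expand`` defaults to "stats" in this API version, so usage statistics
    # are returned with the share; pass ``x_ms_snapshot`` to target a snapshot.
    share = await client.file_shares.get(resource_group_name, account_name, share_name)
    print(share.name, share.share_usage_bytes)
```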
diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_management_policies_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_management_policies_operations.py index ee89243e8d1e..7af33facc22d 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_management_policies_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_management_policies_operations.py @@ -45,7 +45,7 @@ async def get( resource_group_name: str, account_name: str, management_policy_name: Union[str, "_models.ManagementPolicyName"], - **kwargs + **kwargs: Any ) -> "_models.ManagementPolicy": """Gets the managementpolicy associated with the specified storage account. @@ -112,7 +112,7 @@ async def create_or_update( account_name: str, management_policy_name: Union[str, "_models.ManagementPolicyName"], properties: "_models.ManagementPolicy", - **kwargs + **kwargs: Any ) -> "_models.ManagementPolicy": """Sets the managementpolicy to the specified storage account. @@ -185,7 +185,7 @@ async def delete( resource_group_name: str, account_name: str, management_policy_name: Union[str, "_models.ManagementPolicyName"], - **kwargs + **kwargs: Any ) -> None: """Deletes the managementpolicy associated with the specified storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_object_replication_policies_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_object_replication_policies_operations.py index 75ce07b70611..a906c358f4fe 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_object_replication_policies_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_object_replication_policies_operations.py @@ -45,7 +45,7 @@ def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.ObjectReplicationPolicies"]: """List the object replication policies associated with the storage account. @@ -124,7 +124,7 @@ async def get( resource_group_name: str, account_name: str, object_replication_policy_id: str, - **kwargs + **kwargs: Any ) -> "_models.ObjectReplicationPolicy": """Get the object replication policy of the storage account by policy ID. @@ -192,7 +192,7 @@ async def create_or_update( account_name: str, object_replication_policy_id: str, properties: "_models.ObjectReplicationPolicy", - **kwargs + **kwargs: Any ) -> "_models.ObjectReplicationPolicy": """Create or update the object replication policy of the storage account. @@ -267,7 +267,7 @@ async def delete( resource_group_name: str, account_name: str, object_replication_policy_id: str, - **kwargs + **kwargs: Any ) -> None: """Deletes the object replication policy associated with the specified storage account. 
diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_operations.py index 2dc76eec21b8..f113b28ee1ef 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.OperationListResult"]: """Lists all of the available Storage Rest API operations. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_private_endpoint_connections_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_private_endpoint_connections_operations.py index 876b640fd299..b965d4e311b2 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_private_endpoint_connections_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_private_endpoint_connections_operations.py @@ -45,7 +45,7 @@ def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.PrivateEndpointConnectionListResult"]: """List all the private endpoint connections associated with the storage account. @@ -123,7 +123,7 @@ async def get( resource_group_name: str, account_name: str, private_endpoint_connection_name: str, - **kwargs + **kwargs: Any ) -> "_models.PrivateEndpointConnection": """Gets the specified private endpoint connection associated with the storage account. @@ -191,7 +191,7 @@ async def put( account_name: str, private_endpoint_connection_name: str, properties: "_models.PrivateEndpointConnection", - **kwargs + **kwargs: Any ) -> "_models.PrivateEndpointConnection": """Update the state of specified private endpoint connection associated with the storage account. @@ -265,7 +265,7 @@ async def delete( resource_group_name: str, account_name: str, private_endpoint_connection_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes the specified private endpoint connection associated with the storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_private_link_resources_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_private_link_resources_operations.py index ea51308267f2..5a6ade98542b 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_private_link_resources_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_private_link_resources_operations.py @@ -44,7 +44,7 @@ async def list_by_storage_account( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.PrivateLinkResourceListResult": """Gets the private link resources that need to be created for a storage account. 
diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_queue_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_queue_operations.py index d7de946fcdae..d8541e40852e 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_queue_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_queue_operations.py @@ -47,7 +47,7 @@ async def create( account_name: str, queue_name: str, queue: "_models.StorageQueue", - **kwargs + **kwargs: Any ) -> "_models.StorageQueue": """Creates a new queue with the specified queue name, under the specified account. @@ -123,7 +123,7 @@ async def update( account_name: str, queue_name: str, queue: "_models.StorageQueue", - **kwargs + **kwargs: Any ) -> "_models.StorageQueue": """Creates a new queue with the specified queue name, under the specified account. @@ -198,7 +198,7 @@ async def get( resource_group_name: str, account_name: str, queue_name: str, - **kwargs + **kwargs: Any ) -> "_models.StorageQueue": """Gets the queue with the specified queue name, under the specified account if it exists. @@ -266,7 +266,7 @@ async def delete( resource_group_name: str, account_name: str, queue_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes the queue with the specified queue name, under the specified account if it exists. @@ -332,7 +332,7 @@ def list( account_name: str, maxpagesize: Optional[str] = None, filter: Optional[str] = None, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.ListQueueResource"]: """Gets a list of all the queues under the specified storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_queue_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_queue_services_operations.py index b8aaaad75149..0eb1beeee8e6 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_queue_services_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_queue_services_operations.py @@ -44,7 +44,7 @@ async def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.ListQueueServices": """List all queue services for the storage account. @@ -106,7 +106,7 @@ async def set_service_properties( resource_group_name: str, account_name: str, parameters: "_models.QueueServiceProperties", - **kwargs + **kwargs: Any ) -> "_models.QueueServiceProperties": """Sets the properties of a storage account’s Queue service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. @@ -178,7 +178,7 @@ async def get_service_properties( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.QueueServiceProperties": """Gets the properties of a storage account’s Queue service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. 
diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_skus_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_skus_operations.py index 682e54407335..814c41264640 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_skus_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_skus_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageSkuListResult"]: """Lists the available SKUs supported by Microsoft.Storage for given subscription. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_storage_accounts_operations.py index c3481d0f4f91..38d4184b62d4 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_storage_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_storage_accounts_operations.py @@ -46,7 +46,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: async def check_name_availability( self, account_name: "_models.StorageAccountCheckNameAvailabilityParameters", - **kwargs + **kwargs: Any ) -> "_models.CheckNameAvailabilityResult": """Checks that the storage account name is valid and is not already in use. @@ -108,7 +108,7 @@ async def _create_initial( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountCreateParameters", - **kwargs + **kwargs: Any ) -> Optional["_models.StorageAccount"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.StorageAccount"]] error_map = { @@ -163,7 +163,7 @@ async def begin_create( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountCreateParameters", - **kwargs + **kwargs: Any ) -> AsyncLROPoller["_models.StorageAccount"]: """Asynchronously creates a new storage account with the specified parameters. If an account is already created and a subsequent create request is issued with different properties, the @@ -181,8 +181,8 @@ async def begin_create( :type parameters: ~azure.mgmt.storage.v2020_08_01_preview.models.StorageAccountCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either StorageAccount or the result of cls(response) @@ -239,7 +239,7 @@ async def delete( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes a storage account in Microsoft Azure. 
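Aside (illustrative, not part of the patch): the reworded `polling` docstring for `begin_create` above can be exercised as sketched below. This is not a canonical sample: it assumes `azure-identity` is installed, and the subscription ID, resource group, and account names are placeholders.

```py
# Sketch of the 'polling' keyword described in the reworded docstring above.
# Assumes azure-identity is installed; angle-bracketed values are placeholders.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.storage.aio import StorageManagementClient
from azure.mgmt.storage.models import Sku, StorageAccountCreateParameters


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with StorageManagementClient(credential, "<subscription-id>") as client:
            params = StorageAccountCreateParameters(
                sku=Sku(name="Standard_LRS"), kind="StorageV2", location="eastus"
            )

            # Default behaviour: AsyncARMPolling drives the long-running operation.
            poller = await client.storage_accounts.begin_create(
                "<resource-group>", "<account-name>", params
            )
            account = await poller.result()
            print(account.name)

            # polling=False: return right after the initial response; result()
            # then completes without client-side polling.
            quick = await client.storage_accounts.begin_create(
                "<resource-group>", "<account-name>", params, polling=False
            )
            await quick.result()


asyncio.run(main())
```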
@@ -296,7 +296,7 @@ async def get_properties( resource_group_name: str, account_name: str, expand: Optional[Union[str, "_models.StorageAccountExpand"]] = None, - **kwargs + **kwargs: Any ) -> "_models.StorageAccount": """Returns the properties for the specified storage account including but not limited to name, SKU name, location, and account status. The ListKeys operation should be used to retrieve storage @@ -366,7 +366,7 @@ async def update( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountUpdateParameters", - **kwargs + **kwargs: Any ) -> "_models.StorageAccount": """The update operation can be used to update the SKU, encryption, access tier, or tags for a storage account. It can also be used to map the account to a custom domain. Only one custom @@ -439,7 +439,7 @@ async def update( def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageAccountListResult"]: """Lists all the storage accounts available under the subscription. Note that storage keys are not returned; use the ListKeys operation for this. @@ -507,7 +507,7 @@ async def get_next(next_link=None): def list_by_resource_group( self, resource_group_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageAccountListResult"]: """Lists all the storage accounts available under the given resource group. Note that storage keys are not returned; use the ListKeys operation for this. @@ -581,7 +581,7 @@ async def list_keys( resource_group_name: str, account_name: str, expand: Optional[str] = "kerb", - **kwargs + **kwargs: Any ) -> "_models.StorageAccountListKeysResult": """Lists the access keys or Kerberos keys (if active directory enabled) for the specified storage account. @@ -648,7 +648,7 @@ async def regenerate_key( resource_group_name: str, account_name: str, regenerate_key: "_models.StorageAccountRegenerateKeyParameters", - **kwargs + **kwargs: Any ) -> "_models.StorageAccountListKeysResult": """Regenerates one of the access keys or Kerberos keys for the specified storage account. @@ -718,7 +718,7 @@ async def list_account_sas( resource_group_name: str, account_name: str, parameters: "_models.AccountSasParameters", - **kwargs + **kwargs: Any ) -> "_models.ListAccountSasResponse": """List SAS credentials of a storage account. @@ -787,7 +787,7 @@ async def list_service_sas( resource_group_name: str, account_name: str, parameters: "_models.ServiceSasParameters", - **kwargs + **kwargs: Any ) -> "_models.ListServiceSasResponse": """List service SAS credentials of a specific resource. @@ -855,7 +855,7 @@ async def _failover_initial( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { @@ -897,7 +897,7 @@ async def begin_failover( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncLROPoller[None]: """Failover request can be triggered for a storage account in case of availability issues. The failover occurs from the storage account's primary cluster to secondary cluster for RA-GRS @@ -912,8 +912,8 @@ async def begin_failover( :type account_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. 
+ :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) @@ -967,7 +967,7 @@ async def _restore_blob_ranges_initial( resource_group_name: str, account_name: str, parameters: "_models.BlobRestoreParameters", - **kwargs + **kwargs: Any ) -> "_models.BlobRestoreStatus": cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobRestoreStatus"] error_map = { @@ -1024,7 +1024,7 @@ async def begin_restore_blob_ranges( resource_group_name: str, account_name: str, parameters: "_models.BlobRestoreParameters", - **kwargs + **kwargs: Any ) -> AsyncLROPoller["_models.BlobRestoreStatus"]: """Restore blobs in the specified blob ranges. @@ -1039,8 +1039,8 @@ async def begin_restore_blob_ranges( :type parameters: ~azure.mgmt.storage.v2020_08_01_preview.models.BlobRestoreParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either BlobRestoreStatus or the result of cls(response) @@ -1097,7 +1097,7 @@ async def revoke_user_delegation_keys( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> None: """Revoke user delegation keys. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_table_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_table_operations.py index 98c16bd52ecc..cbdcab3a72ba 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_table_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_table_operations.py @@ -46,7 +46,7 @@ async def create( resource_group_name: str, account_name: str, table_name: str, - **kwargs + **kwargs: Any ) -> "_models.Table": """Creates a new table with the specified table name, under the specified account. @@ -113,7 +113,7 @@ async def update( resource_group_name: str, account_name: str, table_name: str, - **kwargs + **kwargs: Any ) -> "_models.Table": """Creates a new table with the specified table name, under the specified account. @@ -180,7 +180,7 @@ async def get( resource_group_name: str, account_name: str, table_name: str, - **kwargs + **kwargs: Any ) -> "_models.Table": """Gets the table with the specified table name, under the specified account if it exists. 
@@ -247,7 +247,7 @@ async def delete( resource_group_name: str, account_name: str, table_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes the table with the specified table name, under the specified account if it exists. @@ -310,7 +310,7 @@ def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.ListTableResource"]: """Gets a list of all the tables under the specified storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_table_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_table_services_operations.py index a8f6ea36f694..f239cb8720cb 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_table_services_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_table_services_operations.py @@ -44,7 +44,7 @@ async def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.ListTableServices": """List all table services for the storage account. @@ -106,7 +106,7 @@ async def set_service_properties( resource_group_name: str, account_name: str, parameters: "_models.TableServiceProperties", - **kwargs + **kwargs: Any ) -> "_models.TableServiceProperties": """Sets the properties of a storage account’s Table service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. @@ -178,7 +178,7 @@ async def get_service_properties( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.TableServiceProperties": """Gets the properties of a storage account’s Table service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_usages_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_usages_operations.py index 21474cd2c4ca..c612906d7006 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_usages_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/aio/operations/_usages_operations.py @@ -44,7 +44,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list_by_location( self, location: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.UsageListResult"]: """Gets the current usage count and the limit for the resources of the location under the subscription. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/models/_models.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/models/_models.py index 0aa65c5d984a..194dcd985483 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/models/_models.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/models/_models.py @@ -1969,7 +1969,7 @@ class Identity(msrest.serialization.Model): :vartype principal_id: str :ivar tenant_id: The tenant ID of resource. :vartype tenant_id: str - :ivar type: Required. The identity type. Default value: "SystemAssigned". + :ivar type: The identity type. Has constant value: "SystemAssigned". 
:vartype type: str """ @@ -2106,20 +2106,18 @@ def __init__( class IPRule(msrest.serialization.Model): """IP rule with specific IP or IP range in CIDR format. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param ip_address_or_range: Required. Specifies the IP or IP range in CIDR format. Only IPV4 address is allowed. :type ip_address_or_range: str - :ivar action: The action of IP ACL rule. Default value: "Allow". - :vartype action: str + :param action: The action of IP ACL rule. The only acceptable values to pass in are None and + "Allow". The default value is None. + :type action: str """ _validation = { 'ip_address_or_range': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -2127,14 +2125,13 @@ class IPRule(msrest.serialization.Model): 'action': {'key': 'action', 'type': 'str'}, } - action = "Allow" - def __init__( self, **kwargs ): super(IPRule, self).__init__(**kwargs) self.ip_address_or_range = kwargs['ip_address_or_range'] + self.action = kwargs.get('action', None) class KeyVaultProperties(msrest.serialization.Model): @@ -4291,7 +4288,7 @@ class StorageAccountCheckNameAvailabilityParameters(msrest.serialization.Model): :param name: Required. The storage account name. :type name: str - :ivar type: Required. The type of resource, Microsoft.Storage/storageAccounts. Default value: + :ivar type: The type of resource, Microsoft.Storage/storageAccounts. Has constant value: "Microsoft.Storage/storageAccounts". :vartype type: str """ @@ -5164,15 +5161,14 @@ def __init__( class VirtualNetworkRule(msrest.serialization.Model): """Virtual Network rule. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param virtual_network_resource_id: Required. Resource ID of a subnet, for example: /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}. :type virtual_network_resource_id: str - :ivar action: The action of virtual network rule. Default value: "Allow". - :vartype action: str + :param action: The action of virtual network rule. The only acceptable values to pass in are + None and "Allow". The default value is None. + :type action: str :param state: Gets the state of virtual network rule. Possible values include: "provisioning", "deprovisioning", "succeeded", "failed", "networkSourceDeleted". 
:type state: str or ~azure.mgmt.storage.v2020_08_01_preview.models.State @@ -5180,7 +5176,6 @@ class VirtualNetworkRule(msrest.serialization.Model): _validation = { 'virtual_network_resource_id': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -5189,12 +5184,11 @@ class VirtualNetworkRule(msrest.serialization.Model): 'state': {'key': 'state', 'type': 'str'}, } - action = "Allow" - def __init__( self, **kwargs ): super(VirtualNetworkRule, self).__init__(**kwargs) self.virtual_network_resource_id = kwargs['virtual_network_resource_id'] + self.action = kwargs.get('action', None) self.state = kwargs.get('state', None) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/models/_models_py3.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/models/_models_py3.py index 9b24eaff7a60..d7cabcdaa929 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/models/_models_py3.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/models/_models_py3.py @@ -2111,7 +2111,7 @@ class Identity(msrest.serialization.Model): :vartype principal_id: str :ivar tenant_id: The tenant ID of resource. :vartype tenant_id: str - :ivar type: Required. The identity type. Default value: "SystemAssigned". + :ivar type: The identity type. Has constant value: "SystemAssigned". :vartype type: str """ @@ -2254,20 +2254,18 @@ def __init__( class IPRule(msrest.serialization.Model): """IP rule with specific IP or IP range in CIDR format. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param ip_address_or_range: Required. Specifies the IP or IP range in CIDR format. Only IPV4 address is allowed. :type ip_address_or_range: str - :ivar action: The action of IP ACL rule. Default value: "Allow". - :vartype action: str + :param action: The action of IP ACL rule. The only acceptable values to pass in are None and + "Allow". The default value is None. + :type action: str """ _validation = { 'ip_address_or_range': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -2275,16 +2273,16 @@ class IPRule(msrest.serialization.Model): 'action': {'key': 'action', 'type': 'str'}, } - action = "Allow" - def __init__( self, *, ip_address_or_range: str, + action: Optional[str] = None, **kwargs ): super(IPRule, self).__init__(**kwargs) self.ip_address_or_range = ip_address_or_range + self.action = action class KeyVaultProperties(msrest.serialization.Model): @@ -4620,7 +4618,7 @@ class StorageAccountCheckNameAvailabilityParameters(msrest.serialization.Model): :param name: Required. The storage account name. :type name: str - :ivar type: Required. The type of resource, Microsoft.Storage/storageAccounts. Default value: + :ivar type: The type of resource, Microsoft.Storage/storageAccounts. Has constant value: "Microsoft.Storage/storageAccounts". :vartype type: str """ @@ -5549,15 +5547,14 @@ def __init__( class VirtualNetworkRule(msrest.serialization.Model): """Virtual Network rule. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param virtual_network_resource_id: Required. Resource ID of a subnet, for example: /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}. 
:type virtual_network_resource_id: str - :ivar action: The action of virtual network rule. Default value: "Allow". - :vartype action: str + :param action: The action of virtual network rule. The only acceptable values to pass in are + None and "Allow". The default value is None. + :type action: str :param state: Gets the state of virtual network rule. Possible values include: "provisioning", "deprovisioning", "succeeded", "failed", "networkSourceDeleted". :type state: str or ~azure.mgmt.storage.v2020_08_01_preview.models.State @@ -5565,7 +5562,6 @@ class VirtualNetworkRule(msrest.serialization.Model): _validation = { 'virtual_network_resource_id': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -5574,15 +5570,15 @@ class VirtualNetworkRule(msrest.serialization.Model): 'state': {'key': 'state', 'type': 'str'}, } - action = "Allow" - def __init__( self, *, virtual_network_resource_id: str, + action: Optional[str] = None, state: Optional[Union[str, "State"]] = None, **kwargs ): super(VirtualNetworkRule, self).__init__(**kwargs) self.virtual_network_resource_id = virtual_network_resource_id + self.action = action self.state = state diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/operations/_storage_accounts_operations.py index 8f376d64494e..7db5cf712d13 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/operations/_storage_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2020_08_01_preview/operations/_storage_accounts_operations.py @@ -188,8 +188,8 @@ def begin_create( :type parameters: ~azure.mgmt.storage.v2020_08_01_preview.models.StorageAccountCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the ARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either StorageAccount or the result of cls(response) @@ -930,8 +930,8 @@ def begin_failover( :type account_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the ARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) @@ -1059,8 +1059,8 @@ def begin_restore_blob_ranges( :type parameters: ~azure.mgmt.storage.v2020_08_01_preview.models.BlobRestoreParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the ARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either BlobRestoreStatus or the result of cls(response) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/_version.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/_version.py index 8dae91701610..232662316d4d 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/_version.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "18.0.0" +VERSION = "19.0.0" diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_blob_containers_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_blob_containers_operations.py index 7b744f7f57a6..5ec25418be87 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_blob_containers_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_blob_containers_operations.py @@ -48,7 +48,7 @@ def list( maxpagesize: Optional[str] = None, filter: Optional[str] = None, include: Optional[Union[str, "_models.ListContainersInclude"]] = None, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.ListContainerItems"]: """Lists all containers and does not support a prefix like data plane. Also SRP today does not return continuation token. @@ -142,7 +142,7 @@ async def create( account_name: str, container_name: str, blob_container: "_models.BlobContainer", - **kwargs + **kwargs: Any ) -> "_models.BlobContainer": """Creates a new container under the specified account as described by request body. The container resource includes metadata and properties for that container. It does not include a list of the @@ -224,7 +224,7 @@ async def update( account_name: str, container_name: str, blob_container: "_models.BlobContainer", - **kwargs + **kwargs: Any ) -> "_models.BlobContainer": """Updates container properties as specified in request body. Properties not mentioned in the request will be unchanged. Update fails if the specified container doesn't already exist. @@ -300,7 +300,7 @@ async def get( resource_group_name: str, account_name: str, container_name: str, - **kwargs + **kwargs: Any ) -> "_models.BlobContainer": """Gets properties of a specified container. 
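Aside (illustrative, not part of the patch): the model hunks above change `action` on `IPRule` and `VirtualNetworkRule` from a class-level constant pinned to "Allow" into an ordinary optional constructor argument whose only accepted values are None and "Allow". A minimal sketch of how callers now spell it in the 19.0.0 package versioned above; the subnet resource ID and CIDR range are placeholders.

```py
# Sketch of the IPRule / VirtualNetworkRule constructors after the change
# above: 'action' is now a regular optional argument (None or "Allow")
# instead of a class constant. Resource IDs and ranges are placeholders.
from azure.mgmt.storage.models import IPRule, NetworkRuleSet, VirtualNetworkRule

subnet_id = (
    "/subscriptions/<subscription-id>/resourceGroups/<resource-group>/providers/"
    "Microsoft.Network/virtualNetworks/<vnet-name>/subnets/<subnet-name>"
)

rule_set = NetworkRuleSet(
    default_action="Deny",
    ip_rules=[IPRule(ip_address_or_range="203.0.113.0/24", action="Allow")],
    virtual_network_rules=[
        # 'action' may be omitted; it then stays None, the new default.
        VirtualNetworkRule(virtual_network_resource_id=subnet_id)
    ],
)
print(rule_set.ip_rules[0].action, rule_set.virtual_network_rules[0].action)
```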
@@ -368,7 +368,7 @@ async def delete( resource_group_name: str, account_name: str, container_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes specified container under its account. @@ -432,7 +432,7 @@ async def set_legal_hold( account_name: str, container_name: str, legal_hold: "_models.LegalHold", - **kwargs + **kwargs: Any ) -> "_models.LegalHold": """Sets legal hold tags. Setting the same tag results in an idempotent operation. SetLegalHold follows an append pattern and does not clear out the existing tags that are not specified in @@ -510,7 +510,7 @@ async def clear_legal_hold( account_name: str, container_name: str, legal_hold: "_models.LegalHold", - **kwargs + **kwargs: Any ) -> "_models.LegalHold": """Clears legal hold tags. Clearing the same or non-existent tag results in an idempotent operation. ClearLegalHold clears out only the specified tags in the request. @@ -588,7 +588,7 @@ async def create_or_update_immutability_policy( container_name: str, if_match: Optional[str] = None, parameters: Optional["_models.ImmutabilityPolicy"] = None, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Creates or updates an unlocked immutability policy. ETag in If-Match is honored if given but not required for this operation. @@ -679,7 +679,7 @@ async def get_immutability_policy( account_name: str, container_name: str, if_match: Optional[str] = None, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Gets the existing immutability policy along with the corresponding ETag in response headers and body. @@ -759,7 +759,7 @@ async def delete_immutability_policy( account_name: str, container_name: str, if_match: str, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Aborts an unlocked immutability policy. The response of delete has immutabilityPeriodSinceCreationInDays set to 0. ETag in If-Match is required for this @@ -840,7 +840,7 @@ async def lock_immutability_policy( account_name: str, container_name: str, if_match: str, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Sets the ImmutabilityPolicy to Locked state. The only action allowed on a Locked policy is ExtendImmutabilityPolicy action. ETag in If-Match is required for this operation. @@ -918,7 +918,7 @@ async def extend_immutability_policy( container_name: str, if_match: str, parameters: Optional["_models.ImmutabilityPolicy"] = None, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Extends the immutabilityPeriodSinceCreationInDays of a locked immutabilityPolicy. The only action allowed on a Locked policy will be this action. ETag in If-Match is required for this @@ -1007,7 +1007,7 @@ async def lease( account_name: str, container_name: str, parameters: Optional["_models.LeaseContainerRequest"] = None, - **kwargs + **kwargs: Any ) -> "_models.LeaseContainerResponse": """The Lease Container operation establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. 
diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_blob_inventory_policies_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_blob_inventory_policies_operations.py index 3c3e77c1c44d..0db3d9b56f5d 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_blob_inventory_policies_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_blob_inventory_policies_operations.py @@ -46,7 +46,7 @@ async def get( resource_group_name: str, account_name: str, blob_inventory_policy_name: Union[str, "_models.BlobInventoryPolicyName"], - **kwargs + **kwargs: Any ) -> "_models.BlobInventoryPolicy": """Gets the blob inventory policy associated with the specified storage account. @@ -114,7 +114,7 @@ async def create_or_update( account_name: str, blob_inventory_policy_name: Union[str, "_models.BlobInventoryPolicyName"], properties: "_models.BlobInventoryPolicy", - **kwargs + **kwargs: Any ) -> "_models.BlobInventoryPolicy": """Sets the blob inventory policy to the specified storage account. @@ -188,7 +188,7 @@ async def delete( resource_group_name: str, account_name: str, blob_inventory_policy_name: Union[str, "_models.BlobInventoryPolicyName"], - **kwargs + **kwargs: Any ) -> None: """Deletes the blob inventory policy associated with the specified storage account. @@ -251,7 +251,7 @@ def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.ListBlobInventoryPolicy"]: """Gets the blob inventory policy associated with the specified storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_blob_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_blob_services_operations.py index 118fa5102a16..a574ef16887d 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_blob_services_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_blob_services_operations.py @@ -45,7 +45,7 @@ def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.BlobServiceItems"]: """List blob services of storage account. It returns a collection of one object named default. @@ -123,7 +123,7 @@ async def set_service_properties( resource_group_name: str, account_name: str, parameters: "_models.BlobServiceProperties", - **kwargs + **kwargs: Any ) -> "_models.BlobServiceProperties": """Sets the properties of a storage account’s Blob service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. @@ -195,7 +195,7 @@ async def get_service_properties( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.BlobServiceProperties": """Gets the properties of a storage account’s Blob service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. 
diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_deleted_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_deleted_accounts_operations.py index 53b6c0f0dfa4..caedd8cb02e0 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_deleted_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_deleted_accounts_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.DeletedAccountListResult"]: """Lists deleted accounts under the subscription. @@ -112,7 +112,7 @@ async def get( self, deleted_account_name: str, location: str, - **kwargs + **kwargs: Any ) -> "_models.DeletedAccount": """Get properties of specified deleted account resource. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_encryption_scopes_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_encryption_scopes_operations.py index 13af980ee616..23a478dcc082 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_encryption_scopes_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_encryption_scopes_operations.py @@ -47,7 +47,7 @@ async def put( account_name: str, encryption_scope_name: str, encryption_scope: "_models.EncryptionScope", - **kwargs + **kwargs: Any ) -> "_models.EncryptionScope": """Synchronously creates or updates an encryption scope under the specified storage account. If an encryption scope is already created and a subsequent request is issued with different @@ -130,7 +130,7 @@ async def patch( account_name: str, encryption_scope_name: str, encryption_scope: "_models.EncryptionScope", - **kwargs + **kwargs: Any ) -> "_models.EncryptionScope": """Update encryption scope properties as specified in the request body. Update fails if the specified encryption scope does not already exist. @@ -207,7 +207,7 @@ async def get( resource_group_name: str, account_name: str, encryption_scope_name: str, - **kwargs + **kwargs: Any ) -> "_models.EncryptionScope": """Returns the properties for the specified encryption scope. @@ -275,7 +275,7 @@ def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.EncryptionScopeListResult"]: """Lists all the encryption scopes available under the specified storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_file_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_file_services_operations.py index 6119b29eafb9..766c575c1a8a 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_file_services_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_file_services_operations.py @@ -44,7 +44,7 @@ async def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.FileServiceItems": """List all file services in storage accounts. 
@@ -106,7 +106,7 @@ async def set_service_properties( resource_group_name: str, account_name: str, parameters: "_models.FileServiceProperties", - **kwargs + **kwargs: Any ) -> "_models.FileServiceProperties": """Sets the properties of file services in storage accounts, including CORS (Cross-Origin Resource Sharing) rules. @@ -178,7 +178,7 @@ async def get_service_properties( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.FileServiceProperties": """Gets the properties of file services in storage accounts, including CORS (Cross-Origin Resource Sharing) rules. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_file_shares_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_file_shares_operations.py index 75a5afcc8fc6..24a24b197fdb 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_file_shares_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_file_shares_operations.py @@ -48,7 +48,7 @@ def list( maxpagesize: Optional[str] = None, filter: Optional[str] = None, expand: Optional[Union[str, "_models.ListSharesExpand"]] = None, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.FileShareItems"]: """Lists all shares. @@ -142,7 +142,7 @@ async def create( share_name: str, file_share: "_models.FileShare", expand: Optional[Union[str, "_models.PutSharesExpand"]] = None, - **kwargs + **kwargs: Any ) -> "_models.FileShare": """Creates a new share under the specified account as described by request body. The share resource includes metadata and properties for that share. It does not include a list of the @@ -228,7 +228,7 @@ async def update( account_name: str, share_name: str, file_share: "_models.FileShare", - **kwargs + **kwargs: Any ) -> "_models.FileShare": """Updates share properties as specified in request body. Properties not mentioned in the request will not be changed. Update fails if the specified share does not already exist. @@ -306,7 +306,7 @@ async def get( share_name: str, expand: Optional[str] = "stats", x_ms_snapshot: Optional[str] = None, - **kwargs + **kwargs: Any ) -> "_models.FileShare": """Gets properties of a specified share. @@ -383,7 +383,7 @@ async def delete( account_name: str, share_name: str, x_ms_snapshot: Optional[str] = None, - **kwargs + **kwargs: Any ) -> None: """Deletes specified share under its account. @@ -453,7 +453,7 @@ async def restore( account_name: str, share_name: str, deleted_share: "_models.DeletedShare", - **kwargs + **kwargs: Any ) -> None: """Restore a file share within a valid retention days if share soft delete is enabled. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_management_policies_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_management_policies_operations.py index 7a0921c93ce4..1ea23a614d2b 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_management_policies_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_management_policies_operations.py @@ -45,7 +45,7 @@ async def get( resource_group_name: str, account_name: str, management_policy_name: Union[str, "_models.ManagementPolicyName"], - **kwargs + **kwargs: Any ) -> "_models.ManagementPolicy": """Gets the managementpolicy associated with the specified storage account. 
@@ -112,7 +112,7 @@ async def create_or_update( account_name: str, management_policy_name: Union[str, "_models.ManagementPolicyName"], properties: "_models.ManagementPolicy", - **kwargs + **kwargs: Any ) -> "_models.ManagementPolicy": """Sets the managementpolicy to the specified storage account. @@ -185,7 +185,7 @@ async def delete( resource_group_name: str, account_name: str, management_policy_name: Union[str, "_models.ManagementPolicyName"], - **kwargs + **kwargs: Any ) -> None: """Deletes the managementpolicy associated with the specified storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_object_replication_policies_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_object_replication_policies_operations.py index 245ba2166fd8..21ab00380dff 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_object_replication_policies_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_object_replication_policies_operations.py @@ -45,7 +45,7 @@ def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.ObjectReplicationPolicies"]: """List the object replication policies associated with the storage account. @@ -124,7 +124,7 @@ async def get( resource_group_name: str, account_name: str, object_replication_policy_id: str, - **kwargs + **kwargs: Any ) -> "_models.ObjectReplicationPolicy": """Get the object replication policy of the storage account by policy ID. @@ -192,7 +192,7 @@ async def create_or_update( account_name: str, object_replication_policy_id: str, properties: "_models.ObjectReplicationPolicy", - **kwargs + **kwargs: Any ) -> "_models.ObjectReplicationPolicy": """Create or update the object replication policy of the storage account. @@ -267,7 +267,7 @@ async def delete( resource_group_name: str, account_name: str, object_replication_policy_id: str, - **kwargs + **kwargs: Any ) -> None: """Deletes the object replication policy associated with the specified storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_operations.py index a530a43e9ab8..8c61f7a1cef6 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.OperationListResult"]: """Lists all of the available Storage Rest API operations. 
diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_private_endpoint_connections_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_private_endpoint_connections_operations.py index e5e87b908f6a..8672b403dac4 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_private_endpoint_connections_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_private_endpoint_connections_operations.py @@ -45,7 +45,7 @@ def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.PrivateEndpointConnectionListResult"]: """List all the private endpoint connections associated with the storage account. @@ -123,7 +123,7 @@ async def get( resource_group_name: str, account_name: str, private_endpoint_connection_name: str, - **kwargs + **kwargs: Any ) -> "_models.PrivateEndpointConnection": """Gets the specified private endpoint connection associated with the storage account. @@ -191,7 +191,7 @@ async def put( account_name: str, private_endpoint_connection_name: str, properties: "_models.PrivateEndpointConnection", - **kwargs + **kwargs: Any ) -> "_models.PrivateEndpointConnection": """Update the state of specified private endpoint connection associated with the storage account. @@ -265,7 +265,7 @@ async def delete( resource_group_name: str, account_name: str, private_endpoint_connection_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes the specified private endpoint connection associated with the storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_private_link_resources_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_private_link_resources_operations.py index 1aa45ba299c7..7ec8b65a6d38 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_private_link_resources_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_private_link_resources_operations.py @@ -44,7 +44,7 @@ async def list_by_storage_account( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.PrivateLinkResourceListResult": """Gets the private link resources that need to be created for a storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_queue_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_queue_operations.py index 6a458c786a06..c65263b21845 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_queue_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_queue_operations.py @@ -47,7 +47,7 @@ async def create( account_name: str, queue_name: str, queue: "_models.StorageQueue", - **kwargs + **kwargs: Any ) -> "_models.StorageQueue": """Creates a new queue with the specified queue name, under the specified account. @@ -123,7 +123,7 @@ async def update( account_name: str, queue_name: str, queue: "_models.StorageQueue", - **kwargs + **kwargs: Any ) -> "_models.StorageQueue": """Creates a new queue with the specified queue name, under the specified account. 
@@ -198,7 +198,7 @@ async def get( resource_group_name: str, account_name: str, queue_name: str, - **kwargs + **kwargs: Any ) -> "_models.StorageQueue": """Gets the queue with the specified queue name, under the specified account if it exists. @@ -266,7 +266,7 @@ async def delete( resource_group_name: str, account_name: str, queue_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes the queue with the specified queue name, under the specified account if it exists. @@ -332,7 +332,7 @@ def list( account_name: str, maxpagesize: Optional[str] = None, filter: Optional[str] = None, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.ListQueueResource"]: """Gets a list of all the queues under the specified storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_queue_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_queue_services_operations.py index e227d53079de..50d8d539200a 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_queue_services_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_queue_services_operations.py @@ -44,7 +44,7 @@ async def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.ListQueueServices": """List all queue services for the storage account. @@ -106,7 +106,7 @@ async def set_service_properties( resource_group_name: str, account_name: str, parameters: "_models.QueueServiceProperties", - **kwargs + **kwargs: Any ) -> "_models.QueueServiceProperties": """Sets the properties of a storage account’s Queue service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. @@ -178,7 +178,7 @@ async def get_service_properties( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.QueueServiceProperties": """Gets the properties of a storage account’s Queue service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_skus_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_skus_operations.py index 9b02b3478751..c1d7a093c045 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_skus_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_skus_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageSkuListResult"]: """Lists the available SKUs supported by Microsoft.Storage for given subscription. 
diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_storage_accounts_operations.py index cc717da406df..25959d4c323d 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_storage_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_storage_accounts_operations.py @@ -46,7 +46,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: async def check_name_availability( self, account_name: "_models.StorageAccountCheckNameAvailabilityParameters", - **kwargs + **kwargs: Any ) -> "_models.CheckNameAvailabilityResult": """Checks that the storage account name is valid and is not already in use. @@ -108,7 +108,7 @@ async def _create_initial( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountCreateParameters", - **kwargs + **kwargs: Any ) -> Optional["_models.StorageAccount"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.StorageAccount"]] error_map = { @@ -163,7 +163,7 @@ async def begin_create( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountCreateParameters", - **kwargs + **kwargs: Any ) -> AsyncLROPoller["_models.StorageAccount"]: """Asynchronously creates a new storage account with the specified parameters. If an account is already created and a subsequent create request is issued with different properties, the @@ -181,8 +181,8 @@ async def begin_create( :type parameters: ~azure.mgmt.storage.v2021_01_01.models.StorageAccountCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either StorageAccount or the result of cls(response) @@ -239,7 +239,7 @@ async def delete( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes a storage account in Microsoft Azure. @@ -296,7 +296,7 @@ async def get_properties( resource_group_name: str, account_name: str, expand: Optional[Union[str, "_models.StorageAccountExpand"]] = None, - **kwargs + **kwargs: Any ) -> "_models.StorageAccount": """Returns the properties for the specified storage account including but not limited to name, SKU name, location, and account status. The ListKeys operation should be used to retrieve storage @@ -366,7 +366,7 @@ async def update( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountUpdateParameters", - **kwargs + **kwargs: Any ) -> "_models.StorageAccount": """The update operation can be used to update the SKU, encryption, access tier, or tags for a storage account. It can also be used to map the account to a custom domain. 
Only one custom @@ -439,7 +439,7 @@ async def update( def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageAccountListResult"]: """Lists all the storage accounts available under the subscription. Note that storage keys are not returned; use the ListKeys operation for this. @@ -507,7 +507,7 @@ async def get_next(next_link=None): def list_by_resource_group( self, resource_group_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageAccountListResult"]: """Lists all the storage accounts available under the given resource group. Note that storage keys are not returned; use the ListKeys operation for this. @@ -581,7 +581,7 @@ async def list_keys( resource_group_name: str, account_name: str, expand: Optional[str] = "kerb", - **kwargs + **kwargs: Any ) -> "_models.StorageAccountListKeysResult": """Lists the access keys or Kerberos keys (if active directory enabled) for the specified storage account. @@ -648,7 +648,7 @@ async def regenerate_key( resource_group_name: str, account_name: str, regenerate_key: "_models.StorageAccountRegenerateKeyParameters", - **kwargs + **kwargs: Any ) -> "_models.StorageAccountListKeysResult": """Regenerates one of the access keys or Kerberos keys for the specified storage account. @@ -718,7 +718,7 @@ async def list_account_sas( resource_group_name: str, account_name: str, parameters: "_models.AccountSasParameters", - **kwargs + **kwargs: Any ) -> "_models.ListAccountSasResponse": """List SAS credentials of a storage account. @@ -787,7 +787,7 @@ async def list_service_sas( resource_group_name: str, account_name: str, parameters: "_models.ServiceSasParameters", - **kwargs + **kwargs: Any ) -> "_models.ListServiceSasResponse": """List service SAS credentials of a specific resource. @@ -855,7 +855,7 @@ async def _failover_initial( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { @@ -897,7 +897,7 @@ async def begin_failover( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncLROPoller[None]: """Failover request can be triggered for a storage account in case of availability issues. The failover occurs from the storage account's primary cluster to secondary cluster for RA-GRS @@ -912,8 +912,8 @@ async def begin_failover( :type account_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) @@ -967,7 +967,7 @@ async def _restore_blob_ranges_initial( resource_group_name: str, account_name: str, parameters: "_models.BlobRestoreParameters", - **kwargs + **kwargs: Any ) -> "_models.BlobRestoreStatus": cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobRestoreStatus"] error_map = { @@ -1024,7 +1024,7 @@ async def begin_restore_blob_ranges( resource_group_name: str, account_name: str, parameters: "_models.BlobRestoreParameters", - **kwargs + **kwargs: Any ) -> AsyncLROPoller["_models.BlobRestoreStatus"]: """Restore blobs in the specified blob ranges. @@ -1039,8 +1039,8 @@ async def begin_restore_blob_ranges( :type parameters: ~azure.mgmt.storage.v2021_01_01.models.BlobRestoreParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either BlobRestoreStatus or the result of cls(response) @@ -1097,7 +1097,7 @@ async def revoke_user_delegation_keys( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> None: """Revoke user delegation keys. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_table_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_table_operations.py index 2e6bf256ac37..a3a14fac8cac 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_table_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_table_operations.py @@ -46,7 +46,7 @@ async def create( resource_group_name: str, account_name: str, table_name: str, - **kwargs + **kwargs: Any ) -> "_models.Table": """Creates a new table with the specified table name, under the specified account. @@ -113,7 +113,7 @@ async def update( resource_group_name: str, account_name: str, table_name: str, - **kwargs + **kwargs: Any ) -> "_models.Table": """Creates a new table with the specified table name, under the specified account. @@ -180,7 +180,7 @@ async def get( resource_group_name: str, account_name: str, table_name: str, - **kwargs + **kwargs: Any ) -> "_models.Table": """Gets the table with the specified table name, under the specified account if it exists. @@ -247,7 +247,7 @@ async def delete( resource_group_name: str, account_name: str, table_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes the table with the specified table name, under the specified account if it exists. @@ -310,7 +310,7 @@ def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.ListTableResource"]: """Gets a list of all the tables under the specified storage account. 
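The reworded `polling` docstrings above describe the same long-running-operation behavior as before: `polling=True` (the default) uses `AsyncARMPolling`, `polling=False` disables polling, and a pre-initialized polling object replaces the strategy entirely. Below is a minimal usage sketch, assuming the versioned async client and placeholder subscription/resource names; none of this wiring comes from the patch itself.

```py
# Illustrative sketch only: client construction and all "<...>" names are assumptions.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.storage.v2021_02_01.aio import StorageManagementClient
from azure.mgmt.storage.v2021_02_01.models import Sku, StorageAccountCreateParameters


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with StorageManagementClient(credential, "<subscription-id>") as client:
            params = StorageAccountCreateParameters(
                sku=Sku(name="Standard_LRS"), kind="StorageV2", location="westus2"
            )

            # Default behavior: polling=True, i.e. AsyncARMPolling drives the poller.
            poller = await client.storage_accounts.begin_create(
                "<resource-group>", "<account-name>", params
            )
            account = await poller.result()
            print(account.name)

            # A poller can be saved and resumed later via its continuation token,
            # matching the continuation_token keyword in the docstrings above.
            token = poller.continuation_token()

            # polling=False skips polling entirely; the caller works with the
            # initial response and decides how to wait for completion.
            unpolled = await client.storage_accounts.begin_create(
                "<resource-group>", "<account-name>", params, polling=False
            )


asyncio.run(main())
```

A custom strategy (for example an `AsyncPollingMethod` instance constructed with a non-default interval) can be passed the same way via `polling=...`, as the updated docstrings state.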
diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_table_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_table_services_operations.py index 44d0e79680df..7eae020c5aa5 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_table_services_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_table_services_operations.py @@ -44,7 +44,7 @@ async def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.ListTableServices": """List all table services for the storage account. @@ -106,7 +106,7 @@ async def set_service_properties( resource_group_name: str, account_name: str, parameters: "_models.TableServiceProperties", - **kwargs + **kwargs: Any ) -> "_models.TableServiceProperties": """Sets the properties of a storage account’s Table service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. @@ -178,7 +178,7 @@ async def get_service_properties( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.TableServiceProperties": """Gets the properties of a storage account’s Table service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_usages_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_usages_operations.py index 4fb31d3652c0..8533cd128a13 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_usages_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_usages_operations.py @@ -44,7 +44,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list_by_location( self, location: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.UsageListResult"]: """Gets the current usage count and the limit for the resources of the location under the subscription. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/models/_models.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/models/_models.py index 82dfb0a2f5e6..21a22e103718 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/models/_models.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/models/_models.py @@ -2155,20 +2155,18 @@ def __init__( class IPRule(msrest.serialization.Model): """IP rule with specific IP or IP range in CIDR format. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param ip_address_or_range: Required. Specifies the IP or IP range in CIDR format. Only IPV4 address is allowed. :type ip_address_or_range: str - :ivar action: The action of IP ACL rule. Default value: "Allow". - :vartype action: str + :param action: The action of IP ACL rule. The only acceptable values to pass in are None and + "Allow". The default value is None. 
+ :type action: str """ _validation = { 'ip_address_or_range': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -2176,14 +2174,13 @@ class IPRule(msrest.serialization.Model): 'action': {'key': 'action', 'type': 'str'}, } - action = "Allow" - def __init__( self, **kwargs ): super(IPRule, self).__init__(**kwargs) self.ip_address_or_range = kwargs['ip_address_or_range'] + self.action = kwargs.get('action', None) class KeyVaultProperties(msrest.serialization.Model): @@ -4356,7 +4353,7 @@ class StorageAccountCheckNameAvailabilityParameters(msrest.serialization.Model): :param name: Required. The storage account name. :type name: str - :ivar type: Required. The type of resource, Microsoft.Storage/storageAccounts. Default value: + :ivar type: The type of resource, Microsoft.Storage/storageAccounts. Has constant value: "Microsoft.Storage/storageAccounts". :vartype type: str """ @@ -5259,15 +5256,14 @@ def __init__( class VirtualNetworkRule(msrest.serialization.Model): """Virtual Network rule. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param virtual_network_resource_id: Required. Resource ID of a subnet, for example: /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}. :type virtual_network_resource_id: str - :ivar action: The action of virtual network rule. Default value: "Allow". - :vartype action: str + :param action: The action of virtual network rule. The only acceptable values to pass in are + None and "Allow". The default value is None. + :type action: str :param state: Gets the state of virtual network rule. Possible values include: "provisioning", "deprovisioning", "succeeded", "failed", "networkSourceDeleted". :type state: str or ~azure.mgmt.storage.v2021_01_01.models.State @@ -5275,7 +5271,6 @@ class VirtualNetworkRule(msrest.serialization.Model): _validation = { 'virtual_network_resource_id': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -5284,12 +5279,11 @@ class VirtualNetworkRule(msrest.serialization.Model): 'state': {'key': 'state', 'type': 'str'}, } - action = "Allow" - def __init__( self, **kwargs ): super(VirtualNetworkRule, self).__init__(**kwargs) self.virtual_network_resource_id = kwargs['virtual_network_resource_id'] + self.action = kwargs.get('action', None) self.state = kwargs.get('state', None) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/models/_models_py3.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/models/_models_py3.py index 03af40e76c6a..ce32c8537084 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/models/_models_py3.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/models/_models_py3.py @@ -2310,20 +2310,18 @@ def __init__( class IPRule(msrest.serialization.Model): """IP rule with specific IP or IP range in CIDR format. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param ip_address_or_range: Required. Specifies the IP or IP range in CIDR format. Only IPV4 address is allowed. :type ip_address_or_range: str - :ivar action: The action of IP ACL rule. Default value: "Allow". - :vartype action: str + :param action: The action of IP ACL rule. The only acceptable values to pass in are None and + "Allow". 
The default value is None. + :type action: str """ _validation = { 'ip_address_or_range': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -2331,16 +2329,16 @@ class IPRule(msrest.serialization.Model): 'action': {'key': 'action', 'type': 'str'}, } - action = "Allow" - def __init__( self, *, ip_address_or_range: str, + action: Optional[str] = None, **kwargs ): super(IPRule, self).__init__(**kwargs) self.ip_address_or_range = ip_address_or_range + self.action = action class KeyVaultProperties(msrest.serialization.Model): @@ -4697,7 +4695,7 @@ class StorageAccountCheckNameAvailabilityParameters(msrest.serialization.Model): :param name: Required. The storage account name. :type name: str - :ivar type: Required. The type of resource, Microsoft.Storage/storageAccounts. Default value: + :ivar type: The type of resource, Microsoft.Storage/storageAccounts. Has constant value: "Microsoft.Storage/storageAccounts". :vartype type: str """ @@ -5657,15 +5655,14 @@ def __init__( class VirtualNetworkRule(msrest.serialization.Model): """Virtual Network rule. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param virtual_network_resource_id: Required. Resource ID of a subnet, for example: /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}. :type virtual_network_resource_id: str - :ivar action: The action of virtual network rule. Default value: "Allow". - :vartype action: str + :param action: The action of virtual network rule. The only acceptable values to pass in are + None and "Allow". The default value is None. + :type action: str :param state: Gets the state of virtual network rule. Possible values include: "provisioning", "deprovisioning", "succeeded", "failed", "networkSourceDeleted". :type state: str or ~azure.mgmt.storage.v2021_01_01.models.State @@ -5673,7 +5670,6 @@ class VirtualNetworkRule(msrest.serialization.Model): _validation = { 'virtual_network_resource_id': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -5682,15 +5678,15 @@ class VirtualNetworkRule(msrest.serialization.Model): 'state': {'key': 'state', 'type': 'str'}, } - action = "Allow" - def __init__( self, *, virtual_network_resource_id: str, + action: Optional[str] = None, state: Optional[Union[str, "State"]] = None, **kwargs ): super(VirtualNetworkRule, self).__init__(**kwargs) self.virtual_network_resource_id = virtual_network_resource_id + self.action = action self.state = state diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/operations/_storage_accounts_operations.py index 69abcfc2cf5e..97ccc9fd067e 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/operations/_storage_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/operations/_storage_accounts_operations.py @@ -188,8 +188,8 @@ def begin_create( :type parameters: ~azure.mgmt.storage.v2021_01_01.models.StorageAccountCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: Pass in True if you'd like the ARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either StorageAccount or the result of cls(response) @@ -930,8 +930,8 @@ def begin_failover( :type account_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the ARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) @@ -1059,8 +1059,8 @@ def begin_restore_blob_ranges( :type parameters: ~azure.mgmt.storage.v2021_01_01.models.BlobRestoreParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the ARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either BlobRestoreStatus or the result of cls(response) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/_version.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/_version.py index 8dae91701610..232662316d4d 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/_version.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -VERSION = "18.0.0" +VERSION = "19.0.0" diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_blob_containers_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_blob_containers_operations.py index 93a6259bbb18..7773c9cb187e 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_blob_containers_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_blob_containers_operations.py @@ -48,7 +48,7 @@ def list( maxpagesize: Optional[str] = None, filter: Optional[str] = None, include: Optional[Union[str, "_models.ListContainersInclude"]] = None, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.ListContainerItems"]: """Lists all containers and does not support a prefix like data plane. Also SRP today does not return continuation token. @@ -142,7 +142,7 @@ async def create( account_name: str, container_name: str, blob_container: "_models.BlobContainer", - **kwargs + **kwargs: Any ) -> "_models.BlobContainer": """Creates a new container under the specified account as described by request body. The container resource includes metadata and properties for that container. It does not include a list of the @@ -224,7 +224,7 @@ async def update( account_name: str, container_name: str, blob_container: "_models.BlobContainer", - **kwargs + **kwargs: Any ) -> "_models.BlobContainer": """Updates container properties as specified in request body. Properties not mentioned in the request will be unchanged. Update fails if the specified container doesn't already exist. @@ -300,7 +300,7 @@ async def get( resource_group_name: str, account_name: str, container_name: str, - **kwargs + **kwargs: Any ) -> "_models.BlobContainer": """Gets properties of a specified container. @@ -368,7 +368,7 @@ async def delete( resource_group_name: str, account_name: str, container_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes specified container under its account. @@ -432,7 +432,7 @@ async def set_legal_hold( account_name: str, container_name: str, legal_hold: "_models.LegalHold", - **kwargs + **kwargs: Any ) -> "_models.LegalHold": """Sets legal hold tags. Setting the same tag results in an idempotent operation. SetLegalHold follows an append pattern and does not clear out the existing tags that are not specified in @@ -510,7 +510,7 @@ async def clear_legal_hold( account_name: str, container_name: str, legal_hold: "_models.LegalHold", - **kwargs + **kwargs: Any ) -> "_models.LegalHold": """Clears legal hold tags. Clearing the same or non-existent tag results in an idempotent operation. ClearLegalHold clears out only the specified tags in the request. @@ -588,7 +588,7 @@ async def create_or_update_immutability_policy( container_name: str, if_match: Optional[str] = None, parameters: Optional["_models.ImmutabilityPolicy"] = None, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Creates or updates an unlocked immutability policy. ETag in If-Match is honored if given but not required for this operation. @@ -679,7 +679,7 @@ async def get_immutability_policy( account_name: str, container_name: str, if_match: Optional[str] = None, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Gets the existing immutability policy along with the corresponding ETag in response headers and body. 
@@ -759,7 +759,7 @@ async def delete_immutability_policy( account_name: str, container_name: str, if_match: str, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Aborts an unlocked immutability policy. The response of delete has immutabilityPeriodSinceCreationInDays set to 0. ETag in If-Match is required for this @@ -840,7 +840,7 @@ async def lock_immutability_policy( account_name: str, container_name: str, if_match: str, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Sets the ImmutabilityPolicy to Locked state. The only action allowed on a Locked policy is ExtendImmutabilityPolicy action. ETag in If-Match is required for this operation. @@ -918,7 +918,7 @@ async def extend_immutability_policy( container_name: str, if_match: str, parameters: Optional["_models.ImmutabilityPolicy"] = None, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Extends the immutabilityPeriodSinceCreationInDays of a locked immutabilityPolicy. The only action allowed on a Locked policy will be this action. ETag in If-Match is required for this @@ -1007,7 +1007,7 @@ async def lease( account_name: str, container_name: str, parameters: Optional["_models.LeaseContainerRequest"] = None, - **kwargs + **kwargs: Any ) -> "_models.LeaseContainerResponse": """The Lease Container operation establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_blob_inventory_policies_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_blob_inventory_policies_operations.py index 3632455f240c..26347aabee8d 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_blob_inventory_policies_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_blob_inventory_policies_operations.py @@ -46,7 +46,7 @@ async def get( resource_group_name: str, account_name: str, blob_inventory_policy_name: Union[str, "_models.BlobInventoryPolicyName"], - **kwargs + **kwargs: Any ) -> "_models.BlobInventoryPolicy": """Gets the blob inventory policy associated with the specified storage account. @@ -114,7 +114,7 @@ async def create_or_update( account_name: str, blob_inventory_policy_name: Union[str, "_models.BlobInventoryPolicyName"], properties: "_models.BlobInventoryPolicy", - **kwargs + **kwargs: Any ) -> "_models.BlobInventoryPolicy": """Sets the blob inventory policy to the specified storage account. @@ -188,7 +188,7 @@ async def delete( resource_group_name: str, account_name: str, blob_inventory_policy_name: Union[str, "_models.BlobInventoryPolicyName"], - **kwargs + **kwargs: Any ) -> None: """Deletes the blob inventory policy associated with the specified storage account. @@ -251,7 +251,7 @@ def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.ListBlobInventoryPolicy"]: """Gets the blob inventory policy associated with the specified storage account. 
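The `IPRule` and `VirtualNetworkRule` model changes earlier in this patch (v2021_01_01 `_models.py`/`_models_py3.py`, repeated for v2021_02_01 further below) drop the constant `action = "Allow"` class attribute and turn `action` into an ordinary optional keyword whose only accepted values are `None` and `"Allow"`, with `None` as the default. The sketch below shows how the updated constructors might be used; the subnet resource ID is a placeholder, and the `NetworkRuleSet` wrapper is included only for context and is not part of this patch.

```py
# Illustrative construction under the updated model signatures (placeholder IDs).
from azure.mgmt.storage.v2021_01_01.models import (
    IPRule,
    NetworkRuleSet,
    VirtualNetworkRule,
)

# 'action' may now be passed explicitly; omitting it leaves the value as None.
ip_rule = IPRule(ip_address_or_range="203.0.113.0/24", action="Allow")

vnet_rule = VirtualNetworkRule(
    virtual_network_resource_id=(
        "/subscriptions/<subscription-id>/resourceGroups/<group>"
        "/providers/Microsoft.Network/virtualNetworks/<vnet>/subnets/<subnet>"
    ),
    action="Allow",
)

rule_set = NetworkRuleSet(
    default_action="Deny",
    ip_rules=[ip_rule],
    virtual_network_rules=[vnet_rule],
)
```

Because `'action': {'constant': True}` is removed from `_validation`, values supplied by callers are now serialized and sent to the service rather than being forced to the constant `"Allow"`.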
diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_blob_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_blob_services_operations.py index 107ccf855fda..eac22724fa55 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_blob_services_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_blob_services_operations.py @@ -45,7 +45,7 @@ def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.BlobServiceItems"]: """List blob services of storage account. It returns a collection of one object named default. @@ -123,7 +123,7 @@ async def set_service_properties( resource_group_name: str, account_name: str, parameters: "_models.BlobServiceProperties", - **kwargs + **kwargs: Any ) -> "_models.BlobServiceProperties": """Sets the properties of a storage account’s Blob service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. @@ -195,7 +195,7 @@ async def get_service_properties( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.BlobServiceProperties": """Gets the properties of a storage account’s Blob service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_deleted_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_deleted_accounts_operations.py index 196e515bbc06..493edc60ee1c 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_deleted_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_deleted_accounts_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.DeletedAccountListResult"]: """Lists deleted accounts under the subscription. @@ -112,7 +112,7 @@ async def get( self, deleted_account_name: str, location: str, - **kwargs + **kwargs: Any ) -> "_models.DeletedAccount": """Get properties of specified deleted account resource. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_encryption_scopes_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_encryption_scopes_operations.py index 1cb92ae29889..8e4cfa289175 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_encryption_scopes_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_encryption_scopes_operations.py @@ -47,7 +47,7 @@ async def put( account_name: str, encryption_scope_name: str, encryption_scope: "_models.EncryptionScope", - **kwargs + **kwargs: Any ) -> "_models.EncryptionScope": """Synchronously creates or updates an encryption scope under the specified storage account. If an encryption scope is already created and a subsequent request is issued with different @@ -130,7 +130,7 @@ async def patch( account_name: str, encryption_scope_name: str, encryption_scope: "_models.EncryptionScope", - **kwargs + **kwargs: Any ) -> "_models.EncryptionScope": """Update encryption scope properties as specified in the request body. 
Update fails if the specified encryption scope does not already exist. @@ -207,7 +207,7 @@ async def get( resource_group_name: str, account_name: str, encryption_scope_name: str, - **kwargs + **kwargs: Any ) -> "_models.EncryptionScope": """Returns the properties for the specified encryption scope. @@ -275,7 +275,7 @@ def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.EncryptionScopeListResult"]: """Lists all the encryption scopes available under the specified storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_file_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_file_services_operations.py index bd5806166571..b1a8c6fdfdca 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_file_services_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_file_services_operations.py @@ -44,7 +44,7 @@ async def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.FileServiceItems": """List all file services in storage accounts. @@ -106,7 +106,7 @@ async def set_service_properties( resource_group_name: str, account_name: str, parameters: "_models.FileServiceProperties", - **kwargs + **kwargs: Any ) -> "_models.FileServiceProperties": """Sets the properties of file services in storage accounts, including CORS (Cross-Origin Resource Sharing) rules. @@ -178,7 +178,7 @@ async def get_service_properties( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.FileServiceProperties": """Gets the properties of file services in storage accounts, including CORS (Cross-Origin Resource Sharing) rules. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_file_shares_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_file_shares_operations.py index 6c33de0ea4cb..28fdb0080068 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_file_shares_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_file_shares_operations.py @@ -48,7 +48,7 @@ def list( maxpagesize: Optional[str] = None, filter: Optional[str] = None, expand: Optional[Union[str, "_models.ListSharesExpand"]] = None, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.FileShareItems"]: """Lists all shares. @@ -142,7 +142,7 @@ async def create( share_name: str, file_share: "_models.FileShare", expand: Optional[Union[str, "_models.PutSharesExpand"]] = None, - **kwargs + **kwargs: Any ) -> "_models.FileShare": """Creates a new share under the specified account as described by request body. The share resource includes metadata and properties for that share. It does not include a list of the @@ -228,7 +228,7 @@ async def update( account_name: str, share_name: str, file_share: "_models.FileShare", - **kwargs + **kwargs: Any ) -> "_models.FileShare": """Updates share properties as specified in request body. Properties not mentioned in the request will not be changed. Update fails if the specified share does not already exist. @@ -306,7 +306,7 @@ async def get( share_name: str, expand: Optional[str] = "stats", x_ms_snapshot: Optional[str] = None, - **kwargs + **kwargs: Any ) -> "_models.FileShare": """Gets properties of a specified share. 
@@ -383,7 +383,7 @@ async def delete( account_name: str, share_name: str, x_ms_snapshot: Optional[str] = None, - **kwargs + **kwargs: Any ) -> None: """Deletes specified share under its account. @@ -453,7 +453,7 @@ async def restore( account_name: str, share_name: str, deleted_share: "_models.DeletedShare", - **kwargs + **kwargs: Any ) -> None: """Restore a file share within a valid retention days if share soft delete is enabled. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_management_policies_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_management_policies_operations.py index bce6df27a310..6ff754af14a1 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_management_policies_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_management_policies_operations.py @@ -45,7 +45,7 @@ async def get( resource_group_name: str, account_name: str, management_policy_name: Union[str, "_models.ManagementPolicyName"], - **kwargs + **kwargs: Any ) -> "_models.ManagementPolicy": """Gets the managementpolicy associated with the specified storage account. @@ -112,7 +112,7 @@ async def create_or_update( account_name: str, management_policy_name: Union[str, "_models.ManagementPolicyName"], properties: "_models.ManagementPolicy", - **kwargs + **kwargs: Any ) -> "_models.ManagementPolicy": """Sets the managementpolicy to the specified storage account. @@ -185,7 +185,7 @@ async def delete( resource_group_name: str, account_name: str, management_policy_name: Union[str, "_models.ManagementPolicyName"], - **kwargs + **kwargs: Any ) -> None: """Deletes the managementpolicy associated with the specified storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_object_replication_policies_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_object_replication_policies_operations.py index c3cc925b7205..1cc042f1c30b 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_object_replication_policies_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_object_replication_policies_operations.py @@ -45,7 +45,7 @@ def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.ObjectReplicationPolicies"]: """List the object replication policies associated with the storage account. @@ -124,7 +124,7 @@ async def get( resource_group_name: str, account_name: str, object_replication_policy_id: str, - **kwargs + **kwargs: Any ) -> "_models.ObjectReplicationPolicy": """Get the object replication policy of the storage account by policy ID. @@ -192,7 +192,7 @@ async def create_or_update( account_name: str, object_replication_policy_id: str, properties: "_models.ObjectReplicationPolicy", - **kwargs + **kwargs: Any ) -> "_models.ObjectReplicationPolicy": """Create or update the object replication policy of the storage account. @@ -267,7 +267,7 @@ async def delete( resource_group_name: str, account_name: str, object_replication_policy_id: str, - **kwargs + **kwargs: Any ) -> None: """Deletes the object replication policy associated with the specified storage account. 
diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_operations.py index 4036d17d9d95..4defdab67ac3 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.OperationListResult"]: """Lists all of the available Storage Rest API operations. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_private_endpoint_connections_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_private_endpoint_connections_operations.py index 83e966eb8b9d..d35508c36cca 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_private_endpoint_connections_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_private_endpoint_connections_operations.py @@ -45,7 +45,7 @@ def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.PrivateEndpointConnectionListResult"]: """List all the private endpoint connections associated with the storage account. @@ -123,7 +123,7 @@ async def get( resource_group_name: str, account_name: str, private_endpoint_connection_name: str, - **kwargs + **kwargs: Any ) -> "_models.PrivateEndpointConnection": """Gets the specified private endpoint connection associated with the storage account. @@ -191,7 +191,7 @@ async def put( account_name: str, private_endpoint_connection_name: str, properties: "_models.PrivateEndpointConnection", - **kwargs + **kwargs: Any ) -> "_models.PrivateEndpointConnection": """Update the state of specified private endpoint connection associated with the storage account. @@ -265,7 +265,7 @@ async def delete( resource_group_name: str, account_name: str, private_endpoint_connection_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes the specified private endpoint connection associated with the storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_private_link_resources_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_private_link_resources_operations.py index 0313d0ec6e5c..b510330027ad 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_private_link_resources_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_private_link_resources_operations.py @@ -44,7 +44,7 @@ async def list_by_storage_account( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.PrivateLinkResourceListResult": """Gets the private link resources that need to be created for a storage account. 
diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_queue_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_queue_operations.py index b21d186ba979..cfb603403b6e 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_queue_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_queue_operations.py @@ -47,7 +47,7 @@ async def create( account_name: str, queue_name: str, queue: "_models.StorageQueue", - **kwargs + **kwargs: Any ) -> "_models.StorageQueue": """Creates a new queue with the specified queue name, under the specified account. @@ -123,7 +123,7 @@ async def update( account_name: str, queue_name: str, queue: "_models.StorageQueue", - **kwargs + **kwargs: Any ) -> "_models.StorageQueue": """Creates a new queue with the specified queue name, under the specified account. @@ -198,7 +198,7 @@ async def get( resource_group_name: str, account_name: str, queue_name: str, - **kwargs + **kwargs: Any ) -> "_models.StorageQueue": """Gets the queue with the specified queue name, under the specified account if it exists. @@ -266,7 +266,7 @@ async def delete( resource_group_name: str, account_name: str, queue_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes the queue with the specified queue name, under the specified account if it exists. @@ -332,7 +332,7 @@ def list( account_name: str, maxpagesize: Optional[str] = None, filter: Optional[str] = None, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.ListQueueResource"]: """Gets a list of all the queues under the specified storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_queue_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_queue_services_operations.py index fa3a764c26ca..916c0cb85c1d 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_queue_services_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_queue_services_operations.py @@ -44,7 +44,7 @@ async def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.ListQueueServices": """List all queue services for the storage account. @@ -106,7 +106,7 @@ async def set_service_properties( resource_group_name: str, account_name: str, parameters: "_models.QueueServiceProperties", - **kwargs + **kwargs: Any ) -> "_models.QueueServiceProperties": """Sets the properties of a storage account’s Queue service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. @@ -178,7 +178,7 @@ async def get_service_properties( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.QueueServiceProperties": """Gets the properties of a storage account’s Queue service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. 
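Most hunks in this patch make one mechanical change: annotating `**kwargs` as `**kwargs: Any` on the generated operation methods so their signatures are fully typed for static checkers. A stripped-down sketch of the pattern, using a hypothetical stand-in class rather than code copied from the generated clients:

```py
import asyncio
from typing import Any


class ExampleOperations:
    """Illustrative stand-in for a generated operations class; not from this patch."""

    async def get(self, resource_group_name: str, account_name: str, **kwargs: Any) -> str:
        # The ": Any" annotation only affects static typing; optional keywords
        # such as 'cls' still flow through **kwargs at runtime exactly as before.
        kwargs.pop("cls", None)
        return f"{resource_group_name}/{account_name}"


print(asyncio.run(ExampleOperations().get("<resource-group>", "<account-name>")))
```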
diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_skus_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_skus_operations.py index 5300036c1d52..53e098893fa7 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_skus_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_skus_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageSkuListResult"]: """Lists the available SKUs supported by Microsoft.Storage for given subscription. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_storage_accounts_operations.py index 731e6ef4ee70..e60dfe68b399 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_storage_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_storage_accounts_operations.py @@ -46,7 +46,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: async def check_name_availability( self, account_name: "_models.StorageAccountCheckNameAvailabilityParameters", - **kwargs + **kwargs: Any ) -> "_models.CheckNameAvailabilityResult": """Checks that the storage account name is valid and is not already in use. @@ -108,7 +108,7 @@ async def _create_initial( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountCreateParameters", - **kwargs + **kwargs: Any ) -> Optional["_models.StorageAccount"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.StorageAccount"]] error_map = { @@ -163,7 +163,7 @@ async def begin_create( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountCreateParameters", - **kwargs + **kwargs: Any ) -> AsyncLROPoller["_models.StorageAccount"]: """Asynchronously creates a new storage account with the specified parameters. If an account is already created and a subsequent create request is issued with different properties, the @@ -181,8 +181,8 @@ async def begin_create( :type parameters: ~azure.mgmt.storage.v2021_02_01.models.StorageAccountCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either StorageAccount or the result of cls(response) @@ -239,7 +239,7 @@ async def delete( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes a storage account in Microsoft Azure. 
@@ -296,7 +296,7 @@ async def get_properties( resource_group_name: str, account_name: str, expand: Optional[Union[str, "_models.StorageAccountExpand"]] = None, - **kwargs + **kwargs: Any ) -> "_models.StorageAccount": """Returns the properties for the specified storage account including but not limited to name, SKU name, location, and account status. The ListKeys operation should be used to retrieve storage @@ -366,7 +366,7 @@ async def update( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountUpdateParameters", - **kwargs + **kwargs: Any ) -> "_models.StorageAccount": """The update operation can be used to update the SKU, encryption, access tier, or tags for a storage account. It can also be used to map the account to a custom domain. Only one custom @@ -439,7 +439,7 @@ async def update( def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageAccountListResult"]: """Lists all the storage accounts available under the subscription. Note that storage keys are not returned; use the ListKeys operation for this. @@ -507,7 +507,7 @@ async def get_next(next_link=None): def list_by_resource_group( self, resource_group_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageAccountListResult"]: """Lists all the storage accounts available under the given resource group. Note that storage keys are not returned; use the ListKeys operation for this. @@ -581,7 +581,7 @@ async def list_keys( resource_group_name: str, account_name: str, expand: Optional[str] = "kerb", - **kwargs + **kwargs: Any ) -> "_models.StorageAccountListKeysResult": """Lists the access keys or Kerberos keys (if active directory enabled) for the specified storage account. @@ -648,7 +648,7 @@ async def regenerate_key( resource_group_name: str, account_name: str, regenerate_key: "_models.StorageAccountRegenerateKeyParameters", - **kwargs + **kwargs: Any ) -> "_models.StorageAccountListKeysResult": """Regenerates one of the access keys or Kerberos keys for the specified storage account. @@ -718,7 +718,7 @@ async def list_account_sas( resource_group_name: str, account_name: str, parameters: "_models.AccountSasParameters", - **kwargs + **kwargs: Any ) -> "_models.ListAccountSasResponse": """List SAS credentials of a storage account. @@ -787,7 +787,7 @@ async def list_service_sas( resource_group_name: str, account_name: str, parameters: "_models.ServiceSasParameters", - **kwargs + **kwargs: Any ) -> "_models.ListServiceSasResponse": """List service SAS credentials of a specific resource. @@ -855,7 +855,7 @@ async def _failover_initial( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { @@ -897,7 +897,7 @@ async def begin_failover( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncLROPoller[None]: """Failover request can be triggered for a storage account in case of availability issues. The failover occurs from the storage account's primary cluster to secondary cluster for RA-GRS @@ -912,8 +912,8 @@ async def begin_failover( :type account_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. 
+ :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) @@ -967,7 +967,7 @@ async def _restore_blob_ranges_initial( resource_group_name: str, account_name: str, parameters: "_models.BlobRestoreParameters", - **kwargs + **kwargs: Any ) -> "_models.BlobRestoreStatus": cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobRestoreStatus"] error_map = { @@ -1024,7 +1024,7 @@ async def begin_restore_blob_ranges( resource_group_name: str, account_name: str, parameters: "_models.BlobRestoreParameters", - **kwargs + **kwargs: Any ) -> AsyncLROPoller["_models.BlobRestoreStatus"]: """Restore blobs in the specified blob ranges. @@ -1039,8 +1039,8 @@ async def begin_restore_blob_ranges( :type parameters: ~azure.mgmt.storage.v2021_02_01.models.BlobRestoreParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either BlobRestoreStatus or the result of cls(response) @@ -1097,7 +1097,7 @@ async def revoke_user_delegation_keys( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> None: """Revoke user delegation keys. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_table_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_table_operations.py index ca8574cc7a2c..6d851ce472df 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_table_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_table_operations.py @@ -46,7 +46,7 @@ async def create( resource_group_name: str, account_name: str, table_name: str, - **kwargs + **kwargs: Any ) -> "_models.Table": """Creates a new table with the specified table name, under the specified account. @@ -113,7 +113,7 @@ async def update( resource_group_name: str, account_name: str, table_name: str, - **kwargs + **kwargs: Any ) -> "_models.Table": """Creates a new table with the specified table name, under the specified account. @@ -180,7 +180,7 @@ async def get( resource_group_name: str, account_name: str, table_name: str, - **kwargs + **kwargs: Any ) -> "_models.Table": """Gets the table with the specified table name, under the specified account if it exists. 
@@ -247,7 +247,7 @@ async def delete( resource_group_name: str, account_name: str, table_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes the table with the specified table name, under the specified account if it exists. @@ -310,7 +310,7 @@ def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.ListTableResource"]: """Gets a list of all the tables under the specified storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_table_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_table_services_operations.py index 97ee89d6311d..fc661d2fd5d0 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_table_services_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_table_services_operations.py @@ -44,7 +44,7 @@ async def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.ListTableServices": """List all table services for the storage account. @@ -106,7 +106,7 @@ async def set_service_properties( resource_group_name: str, account_name: str, parameters: "_models.TableServiceProperties", - **kwargs + **kwargs: Any ) -> "_models.TableServiceProperties": """Sets the properties of a storage account’s Table service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. @@ -178,7 +178,7 @@ async def get_service_properties( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.TableServiceProperties": """Gets the properties of a storage account’s Table service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_usages_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_usages_operations.py index 87533c9f9d5f..2f546eb90431 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_usages_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/aio/operations/_usages_operations.py @@ -44,7 +44,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list_by_location( self, location: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.UsageListResult"]: """Gets the current usage count and the limit for the resources of the location under the subscription. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/models/_models.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/models/_models.py index 269879ababdb..4dc29f6b362c 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/models/_models.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/models/_models.py @@ -2155,20 +2155,18 @@ def __init__( class IPRule(msrest.serialization.Model): """IP rule with specific IP or IP range in CIDR format. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param ip_address_or_range: Required. Specifies the IP or IP range in CIDR format. Only IPV4 address is allowed. :type ip_address_or_range: str - :ivar action: The action of IP ACL rule. Default value: "Allow". 
- :vartype action: str + :param action: The action of IP ACL rule. The only acceptable values to pass in are None and + "Allow". The default value is None. + :type action: str """ _validation = { 'ip_address_or_range': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -2176,14 +2174,13 @@ class IPRule(msrest.serialization.Model): 'action': {'key': 'action', 'type': 'str'}, } - action = "Allow" - def __init__( self, **kwargs ): super(IPRule, self).__init__(**kwargs) self.ip_address_or_range = kwargs['ip_address_or_range'] + self.action = kwargs.get('action', None) class KeyCreationTime(msrest.serialization.Model): @@ -4450,7 +4447,7 @@ class StorageAccountCheckNameAvailabilityParameters(msrest.serialization.Model): :param name: Required. The storage account name. :type name: str - :ivar type: Required. The type of resource, Microsoft.Storage/storageAccounts. Default value: + :ivar type: The type of resource, Microsoft.Storage/storageAccounts. Has constant value: "Microsoft.Storage/storageAccounts". :vartype type: str """ @@ -5374,15 +5371,14 @@ def __init__( class VirtualNetworkRule(msrest.serialization.Model): """Virtual Network rule. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param virtual_network_resource_id: Required. Resource ID of a subnet, for example: /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}. :type virtual_network_resource_id: str - :ivar action: The action of virtual network rule. Default value: "Allow". - :vartype action: str + :param action: The action of virtual network rule. The only acceptable values to pass in are + None and "Allow". The default value is None. + :type action: str :param state: Gets the state of virtual network rule. Possible values include: "provisioning", "deprovisioning", "succeeded", "failed", "networkSourceDeleted". :type state: str or ~azure.mgmt.storage.v2021_02_01.models.State @@ -5390,7 +5386,6 @@ class VirtualNetworkRule(msrest.serialization.Model): _validation = { 'virtual_network_resource_id': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -5399,12 +5394,11 @@ class VirtualNetworkRule(msrest.serialization.Model): 'state': {'key': 'state', 'type': 'str'}, } - action = "Allow" - def __init__( self, **kwargs ): super(VirtualNetworkRule, self).__init__(**kwargs) self.virtual_network_resource_id = kwargs['virtual_network_resource_id'] + self.action = kwargs.get('action', None) self.state = kwargs.get('state', None) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/models/_models_py3.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/models/_models_py3.py index b4465626acd3..080542258ce2 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/models/_models_py3.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/models/_models_py3.py @@ -2310,20 +2310,18 @@ def __init__( class IPRule(msrest.serialization.Model): """IP rule with specific IP or IP range in CIDR format. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param ip_address_or_range: Required. Specifies the IP or IP range in CIDR format. Only IPV4 address is allowed. :type ip_address_or_range: str - :ivar action: The action of IP ACL rule. 
Default value: "Allow". - :vartype action: str + :param action: The action of IP ACL rule. The only acceptable values to pass in are None and + "Allow". The default value is None. + :type action: str """ _validation = { 'ip_address_or_range': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -2331,16 +2329,16 @@ class IPRule(msrest.serialization.Model): 'action': {'key': 'action', 'type': 'str'}, } - action = "Allow" - def __init__( self, *, ip_address_or_range: str, + action: Optional[str] = None, **kwargs ): super(IPRule, self).__init__(**kwargs) self.ip_address_or_range = ip_address_or_range + self.action = action class KeyCreationTime(msrest.serialization.Model): @@ -4799,7 +4797,7 @@ class StorageAccountCheckNameAvailabilityParameters(msrest.serialization.Model): :param name: Required. The storage account name. :type name: str - :ivar type: Required. The type of resource, Microsoft.Storage/storageAccounts. Default value: + :ivar type: The type of resource, Microsoft.Storage/storageAccounts. Has constant value: "Microsoft.Storage/storageAccounts". :vartype type: str """ @@ -5784,15 +5782,14 @@ def __init__( class VirtualNetworkRule(msrest.serialization.Model): """Virtual Network rule. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param virtual_network_resource_id: Required. Resource ID of a subnet, for example: /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}. :type virtual_network_resource_id: str - :ivar action: The action of virtual network rule. Default value: "Allow". - :vartype action: str + :param action: The action of virtual network rule. The only acceptable values to pass in are + None and "Allow". The default value is None. + :type action: str :param state: Gets the state of virtual network rule. Possible values include: "provisioning", "deprovisioning", "succeeded", "failed", "networkSourceDeleted". 
:type state: str or ~azure.mgmt.storage.v2021_02_01.models.State @@ -5800,7 +5797,6 @@ class VirtualNetworkRule(msrest.serialization.Model): _validation = { 'virtual_network_resource_id': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -5809,15 +5805,15 @@ class VirtualNetworkRule(msrest.serialization.Model): 'state': {'key': 'state', 'type': 'str'}, } - action = "Allow" - def __init__( self, *, virtual_network_resource_id: str, + action: Optional[str] = None, state: Optional[Union[str, "State"]] = None, **kwargs ): super(VirtualNetworkRule, self).__init__(**kwargs) self.virtual_network_resource_id = virtual_network_resource_id + self.action = action self.state = state diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/operations/_storage_accounts_operations.py index c48de05eb38d..47c07ebfcb52 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/operations/_storage_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_02_01/operations/_storage_accounts_operations.py @@ -188,8 +188,8 @@ def begin_create( :type parameters: ~azure.mgmt.storage.v2021_02_01.models.StorageAccountCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the ARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either StorageAccount or the result of cls(response) @@ -930,8 +930,8 @@ def begin_failover( :type account_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the ARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) @@ -1059,8 +1059,8 @@ def begin_restore_blob_ranges( :type parameters: ~azure.mgmt.storage.v2021_02_01.models.BlobRestoreParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the ARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. 
+ :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either BlobRestoreStatus or the result of cls(response) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/_version.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/_version.py index 8dae91701610..232662316d4d 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/_version.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "18.0.0" +VERSION = "19.0.0" diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_blob_containers_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_blob_containers_operations.py index 079288f3887c..a33a46f660c8 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_blob_containers_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_blob_containers_operations.py @@ -50,7 +50,7 @@ def list( maxpagesize: Optional[str] = None, filter: Optional[str] = None, include: Optional[Union[str, "_models.ListContainersInclude"]] = None, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.ListContainerItems"]: """Lists all containers and does not support a prefix like data plane. Also SRP today does not return continuation token. @@ -144,7 +144,7 @@ async def create( account_name: str, container_name: str, blob_container: "_models.BlobContainer", - **kwargs + **kwargs: Any ) -> "_models.BlobContainer": """Creates a new container under the specified account as described by request body. The container resource includes metadata and properties for that container. It does not include a list of the @@ -226,7 +226,7 @@ async def update( account_name: str, container_name: str, blob_container: "_models.BlobContainer", - **kwargs + **kwargs: Any ) -> "_models.BlobContainer": """Updates container properties as specified in request body. Properties not mentioned in the request will be unchanged. Update fails if the specified container doesn't already exist. @@ -302,7 +302,7 @@ async def get( resource_group_name: str, account_name: str, container_name: str, - **kwargs + **kwargs: Any ) -> "_models.BlobContainer": """Gets properties of a specified container. @@ -370,7 +370,7 @@ async def delete( resource_group_name: str, account_name: str, container_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes specified container under its account. @@ -434,7 +434,7 @@ async def set_legal_hold( account_name: str, container_name: str, legal_hold: "_models.LegalHold", - **kwargs + **kwargs: Any ) -> "_models.LegalHold": """Sets legal hold tags. Setting the same tag results in an idempotent operation. 
SetLegalHold follows an append pattern and does not clear out the existing tags that are not specified in @@ -512,7 +512,7 @@ async def clear_legal_hold( account_name: str, container_name: str, legal_hold: "_models.LegalHold", - **kwargs + **kwargs: Any ) -> "_models.LegalHold": """Clears legal hold tags. Clearing the same or non-existent tag results in an idempotent operation. ClearLegalHold clears out only the specified tags in the request. @@ -590,7 +590,7 @@ async def create_or_update_immutability_policy( container_name: str, if_match: Optional[str] = None, parameters: Optional["_models.ImmutabilityPolicy"] = None, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Creates or updates an unlocked immutability policy. ETag in If-Match is honored if given but not required for this operation. @@ -681,7 +681,7 @@ async def get_immutability_policy( account_name: str, container_name: str, if_match: Optional[str] = None, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Gets the existing immutability policy along with the corresponding ETag in response headers and body. @@ -761,7 +761,7 @@ async def delete_immutability_policy( account_name: str, container_name: str, if_match: str, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Aborts an unlocked immutability policy. The response of delete has immutabilityPeriodSinceCreationInDays set to 0. ETag in If-Match is required for this @@ -842,7 +842,7 @@ async def lock_immutability_policy( account_name: str, container_name: str, if_match: str, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Sets the ImmutabilityPolicy to Locked state. The only action allowed on a Locked policy is ExtendImmutabilityPolicy action. ETag in If-Match is required for this operation. @@ -920,7 +920,7 @@ async def extend_immutability_policy( container_name: str, if_match: str, parameters: Optional["_models.ImmutabilityPolicy"] = None, - **kwargs + **kwargs: Any ) -> "_models.ImmutabilityPolicy": """Extends the immutabilityPeriodSinceCreationInDays of a locked immutabilityPolicy. The only action allowed on a Locked policy will be this action. ETag in If-Match is required for this @@ -1009,7 +1009,7 @@ async def lease( account_name: str, container_name: str, parameters: Optional["_models.LeaseContainerRequest"] = None, - **kwargs + **kwargs: Any ) -> "_models.LeaseContainerResponse": """The Lease Container operation establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. @@ -1088,7 +1088,7 @@ async def _object_level_worm_initial( resource_group_name: str, account_name: str, container_name: str, - **kwargs + **kwargs: Any ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { @@ -1134,7 +1134,7 @@ async def begin_object_level_worm( resource_group_name: str, account_name: str, container_name: str, - **kwargs + **kwargs: Any ) -> AsyncLROPoller[None]: """This operation migrates a blob container from container level WORM to object level immutability enabled container. Prerequisites require a container level immutability policy either in locked @@ -1155,8 +1155,8 @@ async def begin_object_level_worm( :type container_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_blob_inventory_policies_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_blob_inventory_policies_operations.py index a8ce9711451e..5db228f296d8 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_blob_inventory_policies_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_blob_inventory_policies_operations.py @@ -46,7 +46,7 @@ async def get( resource_group_name: str, account_name: str, blob_inventory_policy_name: Union[str, "_models.BlobInventoryPolicyName"], - **kwargs + **kwargs: Any ) -> "_models.BlobInventoryPolicy": """Gets the blob inventory policy associated with the specified storage account. @@ -114,7 +114,7 @@ async def create_or_update( account_name: str, blob_inventory_policy_name: Union[str, "_models.BlobInventoryPolicyName"], properties: "_models.BlobInventoryPolicy", - **kwargs + **kwargs: Any ) -> "_models.BlobInventoryPolicy": """Sets the blob inventory policy to the specified storage account. @@ -188,7 +188,7 @@ async def delete( resource_group_name: str, account_name: str, blob_inventory_policy_name: Union[str, "_models.BlobInventoryPolicyName"], - **kwargs + **kwargs: Any ) -> None: """Deletes the blob inventory policy associated with the specified storage account. @@ -251,7 +251,7 @@ def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.ListBlobInventoryPolicy"]: """Gets the blob inventory policy associated with the specified storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_blob_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_blob_services_operations.py index 7f9c497b0e1e..afdd15842a39 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_blob_services_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_blob_services_operations.py @@ -45,7 +45,7 @@ def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.BlobServiceItems"]: """List blob services of storage account. It returns a collection of one object named default. @@ -123,7 +123,7 @@ async def set_service_properties( resource_group_name: str, account_name: str, parameters: "_models.BlobServiceProperties", - **kwargs + **kwargs: Any ) -> "_models.BlobServiceProperties": """Sets the properties of a storage account’s Blob service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. 
@@ -195,7 +195,7 @@ async def get_service_properties( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.BlobServiceProperties": """Gets the properties of a storage account’s Blob service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_deleted_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_deleted_accounts_operations.py index 66efeb4efa8e..6ef845000d4b 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_deleted_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_deleted_accounts_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.DeletedAccountListResult"]: """Lists deleted accounts under the subscription. @@ -112,7 +112,7 @@ async def get( self, deleted_account_name: str, location: str, - **kwargs + **kwargs: Any ) -> "_models.DeletedAccount": """Get properties of specified deleted account resource. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_encryption_scopes_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_encryption_scopes_operations.py index 045d6e87fcd8..c3e1d35abff2 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_encryption_scopes_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_encryption_scopes_operations.py @@ -47,7 +47,7 @@ async def put( account_name: str, encryption_scope_name: str, encryption_scope: "_models.EncryptionScope", - **kwargs + **kwargs: Any ) -> "_models.EncryptionScope": """Synchronously creates or updates an encryption scope under the specified storage account. If an encryption scope is already created and a subsequent request is issued with different @@ -130,7 +130,7 @@ async def patch( account_name: str, encryption_scope_name: str, encryption_scope: "_models.EncryptionScope", - **kwargs + **kwargs: Any ) -> "_models.EncryptionScope": """Update encryption scope properties as specified in the request body. Update fails if the specified encryption scope does not already exist. @@ -207,7 +207,7 @@ async def get( resource_group_name: str, account_name: str, encryption_scope_name: str, - **kwargs + **kwargs: Any ) -> "_models.EncryptionScope": """Returns the properties for the specified encryption scope. @@ -275,7 +275,7 @@ def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.EncryptionScopeListResult"]: """Lists all the encryption scopes available under the specified storage account. 
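Earlier in this diff, `IPRule` and `VirtualNetworkRule` drop the class-level `action = "Allow"` constant and instead accept `action` in the constructor, where only `None` and `"Allow"` are accepted. A short sketch of building a network rule set with the new parameter; the subscription ID and the resource names inside the subnet ID are placeholders:

```py
# A short sketch, not part of the generated code: with the constant removed,
# `action` can be passed explicitly (only None and "Allow" are accepted).
from azure.mgmt.storage.models import IPRule, NetworkRuleSet, VirtualNetworkRule

rule_set = NetworkRuleSet(
    default_action="Deny",
    ip_rules=[IPRule(ip_address_or_range="23.45.1.0/24", action="Allow")],
    virtual_network_rules=[
        VirtualNetworkRule(
            virtual_network_resource_id=(
                "/subscriptions/<sub-id>/resourceGroups/my-rg/providers/"
                "Microsoft.Network/virtualNetworks/my-vnet/subnets/default"
            ),
            action="Allow",
        )
    ],
)
```

Such a `rule_set` would typically be assigned to `network_rule_set` on the storage account create or update parameters.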
diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_file_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_file_services_operations.py index 90530b898ac7..42a5083fc2e5 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_file_services_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_file_services_operations.py @@ -44,7 +44,7 @@ async def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.FileServiceItems": """List all file services in storage accounts. @@ -107,7 +107,7 @@ async def set_service_properties( resource_group_name: str, account_name: str, parameters: "_models.FileServiceProperties", - **kwargs + **kwargs: Any ) -> "_models.FileServiceProperties": """Sets the properties of file services in storage accounts, including CORS (Cross-Origin Resource Sharing) rules. @@ -180,7 +180,7 @@ async def get_service_properties( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.FileServiceProperties": """Gets the properties of file services in storage accounts, including CORS (Cross-Origin Resource Sharing) rules. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_file_shares_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_file_shares_operations.py index 75e8259dddf8..e61cb48fafb6 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_file_shares_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_file_shares_operations.py @@ -48,7 +48,7 @@ def list( maxpagesize: Optional[str] = None, filter: Optional[str] = None, expand: Optional[str] = None, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.FileShareItems"]: """Lists all shares. @@ -144,7 +144,7 @@ async def create( share_name: str, file_share: "_models.FileShare", expand: Optional[str] = None, - **kwargs + **kwargs: Any ) -> "_models.FileShare": """Creates a new share under the specified account as described by request body. The share resource includes metadata and properties for that share. It does not include a list of the @@ -232,7 +232,7 @@ async def update( account_name: str, share_name: str, file_share: "_models.FileShare", - **kwargs + **kwargs: Any ) -> "_models.FileShare": """Updates share properties as specified in request body. Properties not mentioned in the request will not be changed. Update fails if the specified share does not already exist. @@ -311,7 +311,7 @@ async def get( share_name: str, expand: Optional[str] = None, x_ms_snapshot: Optional[str] = None, - **kwargs + **kwargs: Any ) -> "_models.FileShare": """Gets properties of a specified share. @@ -391,7 +391,7 @@ async def delete( share_name: str, x_ms_snapshot: Optional[str] = None, include: Optional[str] = None, - **kwargs + **kwargs: Any ) -> None: """Deletes specified share under its account. @@ -471,7 +471,7 @@ async def restore( account_name: str, share_name: str, deleted_share: "_models.DeletedShare", - **kwargs + **kwargs: Any ) -> None: """Restore a file share within a valid retention days if share soft delete is enabled. 
@@ -546,7 +546,7 @@ async def lease( share_name: str, x_ms_snapshot: Optional[str] = None, parameters: Optional["_models.LeaseShareRequest"] = None, - **kwargs + **kwargs: Any ) -> "_models.LeaseShareResponse": """The Lease Share operation establishes and manages a lock on a share for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_management_policies_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_management_policies_operations.py index 42305fc68478..d351d125a0f6 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_management_policies_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_management_policies_operations.py @@ -45,7 +45,7 @@ async def get( resource_group_name: str, account_name: str, management_policy_name: Union[str, "_models.ManagementPolicyName"], - **kwargs + **kwargs: Any ) -> "_models.ManagementPolicy": """Gets the managementpolicy associated with the specified storage account. @@ -112,7 +112,7 @@ async def create_or_update( account_name: str, management_policy_name: Union[str, "_models.ManagementPolicyName"], properties: "_models.ManagementPolicy", - **kwargs + **kwargs: Any ) -> "_models.ManagementPolicy": """Sets the managementpolicy to the specified storage account. @@ -185,7 +185,7 @@ async def delete( resource_group_name: str, account_name: str, management_policy_name: Union[str, "_models.ManagementPolicyName"], - **kwargs + **kwargs: Any ) -> None: """Deletes the managementpolicy associated with the specified storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_object_replication_policies_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_object_replication_policies_operations.py index 2ff0064e0806..0d318fb806b7 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_object_replication_policies_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_object_replication_policies_operations.py @@ -45,7 +45,7 @@ def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.ObjectReplicationPolicies"]: """List the object replication policies associated with the storage account. @@ -124,7 +124,7 @@ async def get( resource_group_name: str, account_name: str, object_replication_policy_id: str, - **kwargs + **kwargs: Any ) -> "_models.ObjectReplicationPolicy": """Get the object replication policy of the storage account by policy ID. @@ -135,8 +135,10 @@ async def get( Storage account names must be between 3 and 24 characters in length and use numbers and lower-case letters only. :type account_name: str - :param object_replication_policy_id: The ID of object replication policy or 'default' if the - policy ID is unknown. + :param object_replication_policy_id: For the destination account, provide the value 'default'. + Configure the policy on the destination account first. For the source account, provide the + value of the policy ID that is returned when you download the policy that was defined on the + destination account. The policy is downloaded as a JSON file. 
:type object_replication_policy_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: ObjectReplicationPolicy, or the result of cls(response) @@ -192,7 +194,7 @@ async def create_or_update( account_name: str, object_replication_policy_id: str, properties: "_models.ObjectReplicationPolicy", - **kwargs + **kwargs: Any ) -> "_models.ObjectReplicationPolicy": """Create or update the object replication policy of the storage account. @@ -203,8 +205,10 @@ async def create_or_update( Storage account names must be between 3 and 24 characters in length and use numbers and lower-case letters only. :type account_name: str - :param object_replication_policy_id: The ID of object replication policy or 'default' if the - policy ID is unknown. + :param object_replication_policy_id: For the destination account, provide the value 'default'. + Configure the policy on the destination account first. For the source account, provide the + value of the policy ID that is returned when you download the policy that was defined on the + destination account. The policy is downloaded as a JSON file. :type object_replication_policy_id: str :param properties: The object replication policy set to a storage account. A unique policy ID will be created if absent. @@ -267,7 +271,7 @@ async def delete( resource_group_name: str, account_name: str, object_replication_policy_id: str, - **kwargs + **kwargs: Any ) -> None: """Deletes the object replication policy associated with the specified storage account. @@ -278,8 +282,10 @@ async def delete( Storage account names must be between 3 and 24 characters in length and use numbers and lower-case letters only. :type account_name: str - :param object_replication_policy_id: The ID of object replication policy or 'default' if the - policy ID is unknown. + :param object_replication_policy_id: For the destination account, provide the value 'default'. + Configure the policy on the destination account first. For the source account, provide the + value of the policy ID that is returned when you download the policy that was defined on the + destination account. The policy is downloaded as a JSON file. :type object_replication_policy_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: None, or the result of cls(response) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_operations.py index 1af12ef521f6..ec690abe837d 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.OperationListResult"]: """Lists all of the available Storage Rest API operations. 
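The object replication docstring updates above distinguish the two accounts: the destination account uses the literal policy ID `'default'`, while the source account uses the policy ID assigned when the policy was created on the destination. A hedged sketch of that flow; the resource group, account, and container names are placeholders:

```py
# A hedged sketch, not part of this diff: set up the policy on the destination
# account with the literal ID 'default', then reuse the server-assigned
# policy_id on the source account. All names below are placeholders.
from azure.identity import DefaultAzureCredential
from azure.mgmt.storage import StorageManagementClient
from azure.mgmt.storage.models import (
    ObjectReplicationPolicy,
    ObjectReplicationPolicyRule,
)

client = StorageManagementClient(DefaultAzureCredential(), "<subscription-id>")

policy = ObjectReplicationPolicy(
    source_account="srcaccount",
    destination_account="dstaccount",
    rules=[
        ObjectReplicationPolicyRule(
            source_container="src-container",
            destination_container="dst-container",
        )
    ],
)

# Destination account first: the policy ID is not known yet, so pass 'default'.
created = client.object_replication_policies.create_or_update(
    "my-rg", "dstaccount", "default", policy
)

# Source account second: reuse the policy ID assigned on the destination.
client.object_replication_policies.create_or_update(
    "my-rg", "srcaccount", created.policy_id, created
)
```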
diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_private_endpoint_connections_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_private_endpoint_connections_operations.py index 3862f564a9d9..f9802d859688 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_private_endpoint_connections_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_private_endpoint_connections_operations.py @@ -45,7 +45,7 @@ def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.PrivateEndpointConnectionListResult"]: """List all the private endpoint connections associated with the storage account. @@ -123,7 +123,7 @@ async def get( resource_group_name: str, account_name: str, private_endpoint_connection_name: str, - **kwargs + **kwargs: Any ) -> "_models.PrivateEndpointConnection": """Gets the specified private endpoint connection associated with the storage account. @@ -191,7 +191,7 @@ async def put( account_name: str, private_endpoint_connection_name: str, properties: "_models.PrivateEndpointConnection", - **kwargs + **kwargs: Any ) -> "_models.PrivateEndpointConnection": """Update the state of specified private endpoint connection associated with the storage account. @@ -265,7 +265,7 @@ async def delete( resource_group_name: str, account_name: str, private_endpoint_connection_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes the specified private endpoint connection associated with the storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_private_link_resources_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_private_link_resources_operations.py index 54748625ee56..a340af4dd0c8 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_private_link_resources_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_private_link_resources_operations.py @@ -44,7 +44,7 @@ async def list_by_storage_account( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.PrivateLinkResourceListResult": """Gets the private link resources that need to be created for a storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_queue_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_queue_operations.py index 3cda7519420a..d6b2efbd39d6 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_queue_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_queue_operations.py @@ -47,7 +47,7 @@ async def create( account_name: str, queue_name: str, queue: "_models.StorageQueue", - **kwargs + **kwargs: Any ) -> "_models.StorageQueue": """Creates a new queue with the specified queue name, under the specified account. @@ -124,7 +124,7 @@ async def update( account_name: str, queue_name: str, queue: "_models.StorageQueue", - **kwargs + **kwargs: Any ) -> "_models.StorageQueue": """Creates a new queue with the specified queue name, under the specified account. 
@@ -200,7 +200,7 @@ async def get( resource_group_name: str, account_name: str, queue_name: str, - **kwargs + **kwargs: Any ) -> "_models.StorageQueue": """Gets the queue with the specified queue name, under the specified account if it exists. @@ -269,7 +269,7 @@ async def delete( resource_group_name: str, account_name: str, queue_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes the queue with the specified queue name, under the specified account if it exists. @@ -336,7 +336,7 @@ def list( account_name: str, maxpagesize: Optional[str] = None, filter: Optional[str] = None, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.ListQueueResource"]: """Gets a list of all the queues under the specified storage account. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_queue_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_queue_services_operations.py index cacf051c38df..015027550a17 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_queue_services_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_queue_services_operations.py @@ -44,7 +44,7 @@ async def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.ListQueueServices": """List all queue services for the storage account. @@ -107,7 +107,7 @@ async def set_service_properties( resource_group_name: str, account_name: str, parameters: "_models.QueueServiceProperties", - **kwargs + **kwargs: Any ) -> "_models.QueueServiceProperties": """Sets the properties of a storage account’s Queue service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. @@ -180,7 +180,7 @@ async def get_service_properties( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.QueueServiceProperties": """Gets the properties of a storage account’s Queue service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_skus_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_skus_operations.py index 3898207133a9..9250537c5fdb 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_skus_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_skus_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageSkuListResult"]: """Lists the available SKUs supported by Microsoft.Storage for given subscription. 
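The recurring `**kwargs` to `**kwargs: Any` change in these hunks is a typing-only update. A minimal illustration of the pattern, not taken from the generated code:

```py
# A minimal illustration: annotating **kwargs as Any gives every parameter an
# annotation, so the signature counts as fully typed under strict checkers
# such as `mypy --strict`.
from typing import Any, Dict


def collect_options(timeout: int = 30, **kwargs: Any) -> Dict[str, Any]:
    # Each value in kwargs is typed as Any inside the function body.
    return {"timeout": timeout, **kwargs}
```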
diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_storage_accounts_operations.py index 272f2767d750..f4380271a954 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_storage_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_storage_accounts_operations.py @@ -46,7 +46,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: async def check_name_availability( self, account_name: "_models.StorageAccountCheckNameAvailabilityParameters", - **kwargs + **kwargs: Any ) -> "_models.CheckNameAvailabilityResult": """Checks that the storage account name is valid and is not already in use. @@ -108,7 +108,7 @@ async def _create_initial( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountCreateParameters", - **kwargs + **kwargs: Any ) -> Optional["_models.StorageAccount"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.StorageAccount"]] error_map = { @@ -163,7 +163,7 @@ async def begin_create( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountCreateParameters", - **kwargs + **kwargs: Any ) -> AsyncLROPoller["_models.StorageAccount"]: """Asynchronously creates a new storage account with the specified parameters. If an account is already created and a subsequent create request is issued with different properties, the @@ -181,8 +181,8 @@ async def begin_create( :type parameters: ~azure.mgmt.storage.v2021_04_01.models.StorageAccountCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either StorageAccount or the result of cls(response) @@ -239,7 +239,7 @@ async def delete( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes a storage account in Microsoft Azure. @@ -296,7 +296,7 @@ async def get_properties( resource_group_name: str, account_name: str, expand: Optional[Union[str, "_models.StorageAccountExpand"]] = None, - **kwargs + **kwargs: Any ) -> "_models.StorageAccount": """Returns the properties for the specified storage account including but not limited to name, SKU name, location, and account status. The ListKeys operation should be used to retrieve storage @@ -366,7 +366,7 @@ async def update( resource_group_name: str, account_name: str, parameters: "_models.StorageAccountUpdateParameters", - **kwargs + **kwargs: Any ) -> "_models.StorageAccount": """The update operation can be used to update the SKU, encryption, access tier, or tags for a storage account. It can also be used to map the account to a custom domain. 
Only one custom @@ -439,7 +439,7 @@ async def update( def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageAccountListResult"]: """Lists all the storage accounts available under the subscription. Note that storage keys are not returned; use the ListKeys operation for this. @@ -507,7 +507,7 @@ async def get_next(next_link=None): def list_by_resource_group( self, resource_group_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.StorageAccountListResult"]: """Lists all the storage accounts available under the given resource group. Note that storage keys are not returned; use the ListKeys operation for this. @@ -581,7 +581,7 @@ async def list_keys( resource_group_name: str, account_name: str, expand: Optional[str] = "kerb", - **kwargs + **kwargs: Any ) -> "_models.StorageAccountListKeysResult": """Lists the access keys or Kerberos keys (if active directory enabled) for the specified storage account. @@ -648,7 +648,7 @@ async def regenerate_key( resource_group_name: str, account_name: str, regenerate_key: "_models.StorageAccountRegenerateKeyParameters", - **kwargs + **kwargs: Any ) -> "_models.StorageAccountListKeysResult": """Regenerates one of the access keys or Kerberos keys for the specified storage account. @@ -718,7 +718,7 @@ async def list_account_sas( resource_group_name: str, account_name: str, parameters: "_models.AccountSasParameters", - **kwargs + **kwargs: Any ) -> "_models.ListAccountSasResponse": """List SAS credentials of a storage account. @@ -787,7 +787,7 @@ async def list_service_sas( resource_group_name: str, account_name: str, parameters: "_models.ServiceSasParameters", - **kwargs + **kwargs: Any ) -> "_models.ListServiceSasResponse": """List service SAS credentials of a specific resource. @@ -855,7 +855,7 @@ async def _failover_initial( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { @@ -897,7 +897,7 @@ async def begin_failover( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncLROPoller[None]: """Failover request can be triggered for a storage account in case of availability issues. The failover occurs from the storage account's primary cluster to secondary cluster for RA-GRS @@ -912,8 +912,8 @@ async def begin_failover( :type account_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) @@ -967,7 +967,7 @@ async def _restore_blob_ranges_initial( resource_group_name: str, account_name: str, parameters: "_models.BlobRestoreParameters", - **kwargs + **kwargs: Any ) -> "_models.BlobRestoreStatus": cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobRestoreStatus"] error_map = { @@ -1024,7 +1024,7 @@ async def begin_restore_blob_ranges( resource_group_name: str, account_name: str, parameters: "_models.BlobRestoreParameters", - **kwargs + **kwargs: Any ) -> AsyncLROPoller["_models.BlobRestoreStatus"]: """Restore blobs in the specified blob ranges. @@ -1039,8 +1039,8 @@ async def begin_restore_blob_ranges( :type parameters: ~azure.mgmt.storage.v2021_04_01.models.BlobRestoreParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either BlobRestoreStatus or the result of cls(response) @@ -1097,7 +1097,7 @@ async def revoke_user_delegation_keys( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> None: """Revoke user delegation keys. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_table_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_table_operations.py index d1984fc59295..ab2f3fc20397 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_table_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_table_operations.py @@ -46,7 +46,7 @@ async def create( resource_group_name: str, account_name: str, table_name: str, - **kwargs + **kwargs: Any ) -> "_models.Table": """Creates a new table with the specified table name, under the specified account. @@ -114,7 +114,7 @@ async def update( resource_group_name: str, account_name: str, table_name: str, - **kwargs + **kwargs: Any ) -> "_models.Table": """Creates a new table with the specified table name, under the specified account. @@ -182,7 +182,7 @@ async def get( resource_group_name: str, account_name: str, table_name: str, - **kwargs + **kwargs: Any ) -> "_models.Table": """Gets the table with the specified table name, under the specified account if it exists. @@ -250,7 +250,7 @@ async def delete( resource_group_name: str, account_name: str, table_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes the table with the specified table name, under the specified account if it exists. @@ -314,7 +314,7 @@ def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.ListTableResource"]: """Gets a list of all the tables under the specified storage account. 
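Related to the `StorageAccountCheckNameAvailabilityParameters` docstring tweak earlier in the diff: since `type` carries the constant value `"Microsoft.Storage/storageAccounts"`, callers only pass `name`. A hedged sketch with placeholder subscription ID and account name:

```py
# A hedged sketch with placeholder values: because `type` is a constant on the
# model ("Microsoft.Storage/storageAccounts"), only `name` is supplied.
from azure.identity import DefaultAzureCredential
from azure.mgmt.storage import StorageManagementClient
from azure.mgmt.storage.models import StorageAccountCheckNameAvailabilityParameters

client = StorageManagementClient(DefaultAzureCredential(), "<subscription-id>")
result = client.storage_accounts.check_name_availability(
    StorageAccountCheckNameAvailabilityParameters(name="mystorageacct")
)
print(result.name_available, result.reason, result.message)
```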
diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_table_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_table_services_operations.py index 5f37c7629f54..dae40c4d10f8 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_table_services_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_table_services_operations.py @@ -44,7 +44,7 @@ async def list( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.ListTableServices": """List all table services for the storage account. @@ -107,7 +107,7 @@ async def set_service_properties( resource_group_name: str, account_name: str, parameters: "_models.TableServiceProperties", - **kwargs + **kwargs: Any ) -> "_models.TableServiceProperties": """Sets the properties of a storage account’s Table service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. @@ -180,7 +180,7 @@ async def get_service_properties( self, resource_group_name: str, account_name: str, - **kwargs + **kwargs: Any ) -> "_models.TableServiceProperties": """Gets the properties of a storage account’s Table service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_usages_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_usages_operations.py index 042ac6700289..fab2699d7255 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_usages_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_usages_operations.py @@ -44,7 +44,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list_by_location( self, location: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.UsageListResult"]: """Gets the current usage count and the limit for the resources of the location under the subscription. diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/models/_models.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/models/_models.py index 1f01f9dc9f94..91eabb7bbf9a 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/models/_models.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/models/_models.py @@ -13,17 +13,17 @@ class AccessPolicy(msrest.serialization.Model): """AccessPolicy. - :param start: Start time of the access policy. - :type start: ~datetime.datetime - :param expiry: Expiry time of the access policy. - :type expiry: ~datetime.datetime + :param start_time: Start time of the access policy. + :type start_time: ~datetime.datetime + :param expiry_time: Expiry time of the access policy. + :type expiry_time: ~datetime.datetime :param permission: List of abbreviated permissions. 
:type permission: str """ _attribute_map = { - 'start': {'key': 'start', 'type': 'iso-8601'}, - 'expiry': {'key': 'expiry', 'type': 'iso-8601'}, + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, 'permission': {'key': 'permission', 'type': 'str'}, } @@ -32,8 +32,8 @@ def __init__( **kwargs ): super(AccessPolicy, self).__init__(**kwargs) - self.start = kwargs.get('start', None) - self.expiry = kwargs.get('expiry', None) + self.start_time = kwargs.get('start_time', None) + self.expiry_time = kwargs.get('expiry_time', None) self.permission = kwargs.get('permission', None) @@ -2361,20 +2361,18 @@ def __init__( class IPRule(msrest.serialization.Model): """IP rule with specific IP or IP range in CIDR format. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param ip_address_or_range: Required. Specifies the IP or IP range in CIDR format. Only IPV4 address is allowed. :type ip_address_or_range: str - :ivar action: The action of IP ACL rule. Default value: "Allow". - :vartype action: str + :param action: The action of IP ACL rule. The only acceptable values to pass in are None and + "Allow". The default value is None. + :type action: str """ _validation = { 'ip_address_or_range': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -2382,14 +2380,13 @@ class IPRule(msrest.serialization.Model): 'action': {'key': 'action', 'type': 'str'}, } - action = "Allow" - def __init__( self, **kwargs ): super(IPRule, self).__init__(**kwargs) self.ip_address_or_range = kwargs['ip_address_or_range'] + self.action = kwargs.get('action', None) class KeyCreationTime(msrest.serialization.Model): @@ -4762,7 +4759,7 @@ class StorageAccountCheckNameAvailabilityParameters(msrest.serialization.Model): :param name: Required. The storage account name. :type name: str - :ivar type: Required. The type of resource, Microsoft.Storage/storageAccounts. Default value: + :ivar type: The type of resource, Microsoft.Storage/storageAccounts. Has constant value: "Microsoft.Storage/storageAccounts". :vartype type: str """ @@ -5696,15 +5693,14 @@ def __init__( class VirtualNetworkRule(msrest.serialization.Model): """Virtual Network rule. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param virtual_network_resource_id: Required. Resource ID of a subnet, for example: /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}. :type virtual_network_resource_id: str - :ivar action: The action of virtual network rule. Default value: "Allow". - :vartype action: str + :param action: The action of virtual network rule. The only acceptable values to pass in are + None and "Allow". The default value is None. + :type action: str :param state: Gets the state of virtual network rule. Possible values include: "Provisioning", "Deprovisioning", "Succeeded", "Failed", "NetworkSourceDeleted". 
:type state: str or ~azure.mgmt.storage.v2021_04_01.models.State @@ -5712,7 +5708,6 @@ class VirtualNetworkRule(msrest.serialization.Model): _validation = { 'virtual_network_resource_id': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -5721,12 +5716,11 @@ class VirtualNetworkRule(msrest.serialization.Model): 'state': {'key': 'state', 'type': 'str'}, } - action = "Allow" - def __init__( self, **kwargs ): super(VirtualNetworkRule, self).__init__(**kwargs) self.virtual_network_resource_id = kwargs['virtual_network_resource_id'] + self.action = kwargs.get('action', None) self.state = kwargs.get('state', None) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/models/_models_py3.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/models/_models_py3.py index 19352c5a60df..606d5e5a1d72 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/models/_models_py3.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/models/_models_py3.py @@ -18,31 +18,31 @@ class AccessPolicy(msrest.serialization.Model): """AccessPolicy. - :param start: Start time of the access policy. - :type start: ~datetime.datetime - :param expiry: Expiry time of the access policy. - :type expiry: ~datetime.datetime + :param start_time: Start time of the access policy. + :type start_time: ~datetime.datetime + :param expiry_time: Expiry time of the access policy. + :type expiry_time: ~datetime.datetime :param permission: List of abbreviated permissions. :type permission: str """ _attribute_map = { - 'start': {'key': 'start', 'type': 'iso-8601'}, - 'expiry': {'key': 'expiry', 'type': 'iso-8601'}, + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, 'permission': {'key': 'permission', 'type': 'str'}, } def __init__( self, *, - start: Optional[datetime.datetime] = None, - expiry: Optional[datetime.datetime] = None, + start_time: Optional[datetime.datetime] = None, + expiry_time: Optional[datetime.datetime] = None, permission: Optional[str] = None, **kwargs ): super(AccessPolicy, self).__init__(**kwargs) - self.start = start - self.expiry = expiry + self.start_time = start_time + self.expiry_time = expiry_time self.permission = permission @@ -2537,20 +2537,18 @@ def __init__( class IPRule(msrest.serialization.Model): """IP rule with specific IP or IP range in CIDR format. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param ip_address_or_range: Required. Specifies the IP or IP range in CIDR format. Only IPV4 address is allowed. :type ip_address_or_range: str - :ivar action: The action of IP ACL rule. Default value: "Allow". - :vartype action: str + :param action: The action of IP ACL rule. The only acceptable values to pass in are None and + "Allow". The default value is None. 
+ :type action: str """ _validation = { 'ip_address_or_range': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -2558,16 +2556,16 @@ class IPRule(msrest.serialization.Model): 'action': {'key': 'action', 'type': 'str'}, } - action = "Allow" - def __init__( self, *, ip_address_or_range: str, + action: Optional[str] = None, **kwargs ): super(IPRule, self).__init__(**kwargs) self.ip_address_or_range = ip_address_or_range + self.action = action class KeyCreationTime(msrest.serialization.Model): @@ -5146,7 +5144,7 @@ class StorageAccountCheckNameAvailabilityParameters(msrest.serialization.Model): :param name: Required. The storage account name. :type name: str - :ivar type: Required. The type of resource, Microsoft.Storage/storageAccounts. Default value: + :ivar type: The type of resource, Microsoft.Storage/storageAccounts. Has constant value: "Microsoft.Storage/storageAccounts". :vartype type: str """ @@ -6143,15 +6141,14 @@ def __init__( class VirtualNetworkRule(msrest.serialization.Model): """Virtual Network rule. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param virtual_network_resource_id: Required. Resource ID of a subnet, for example: /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}. :type virtual_network_resource_id: str - :ivar action: The action of virtual network rule. Default value: "Allow". - :vartype action: str + :param action: The action of virtual network rule. The only acceptable values to pass in are + None and "Allow". The default value is None. + :type action: str :param state: Gets the state of virtual network rule. Possible values include: "Provisioning", "Deprovisioning", "Succeeded", "Failed", "NetworkSourceDeleted". :type state: str or ~azure.mgmt.storage.v2021_04_01.models.State @@ -6159,7 +6156,6 @@ class VirtualNetworkRule(msrest.serialization.Model): _validation = { 'virtual_network_resource_id': {'required': True}, - 'action': {'constant': True}, } _attribute_map = { @@ -6168,15 +6164,15 @@ class VirtualNetworkRule(msrest.serialization.Model): 'state': {'key': 'state', 'type': 'str'}, } - action = "Allow" - def __init__( self, *, virtual_network_resource_id: str, + action: Optional[str] = None, state: Optional[Union[str, "State"]] = None, **kwargs ): super(VirtualNetworkRule, self).__init__(**kwargs) self.virtual_network_resource_id = virtual_network_resource_id + self.action = action self.state = state diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/operations/_blob_containers_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/operations/_blob_containers_operations.py index 784eb597d0b9..462b148f34db 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/operations/_blob_containers_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/operations/_blob_containers_operations.py @@ -1174,8 +1174,8 @@ def begin_object_level_worm( :type container_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the ARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. 
+ :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/operations/_object_replication_policies_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/operations/_object_replication_policies_operations.py index a76b2e476381..e153de6c391b 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/operations/_object_replication_policies_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/operations/_object_replication_policies_operations.py @@ -141,8 +141,10 @@ def get( Storage account names must be between 3 and 24 characters in length and use numbers and lower-case letters only. :type account_name: str - :param object_replication_policy_id: The ID of object replication policy or 'default' if the - policy ID is unknown. + :param object_replication_policy_id: For the destination account, provide the value 'default'. + Configure the policy on the destination account first. For the source account, provide the + value of the policy ID that is returned when you download the policy that was defined on the + destination account. The policy is downloaded as a JSON file. :type object_replication_policy_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: ObjectReplicationPolicy, or the result of cls(response) @@ -210,8 +212,10 @@ def create_or_update( Storage account names must be between 3 and 24 characters in length and use numbers and lower-case letters only. :type account_name: str - :param object_replication_policy_id: The ID of object replication policy or 'default' if the - policy ID is unknown. + :param object_replication_policy_id: For the destination account, provide the value 'default'. + Configure the policy on the destination account first. For the source account, provide the + value of the policy ID that is returned when you download the policy that was defined on the + destination account. The policy is downloaded as a JSON file. :type object_replication_policy_id: str :param properties: The object replication policy set to a storage account. A unique policy ID will be created if absent. @@ -286,8 +290,10 @@ def delete( Storage account names must be between 3 and 24 characters in length and use numbers and lower-case letters only. :type account_name: str - :param object_replication_policy_id: The ID of object replication policy or 'default' if the - policy ID is unknown. + :param object_replication_policy_id: For the destination account, provide the value 'default'. + Configure the policy on the destination account first. For the source account, provide the + value of the policy ID that is returned when you download the policy that was defined on the + destination account. The policy is downloaded as a JSON file. 
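The reworded `object_replication_policy_id` docstrings describe a two-step workflow: configure the policy on the destination account under the ID 'default', then reuse the service-assigned policy ID when defining the policy on the source account. A hypothetical sketch of that flow, where `client` is an already-authenticated StorageManagementClient, `policy_properties` is an ObjectReplicationPolicy built elsewhere, and the `policy_id` attribute read back from the result is an assumed name for illustration:

```py
def configure_object_replication(client, policy_properties):
    """Two-step flow from the docstrings: destination account first, then source."""
    # Step 1: define the policy on the destination account under the ID "default".
    dest_policy = client.object_replication_policies.create_or_update(
        "my-resource-group", "destaccount", "default", policy_properties
    )

    # Step 2: the service assigns a unique policy ID; reuse it when defining the
    # policy on the source account (the `policy_id` attribute name is assumed).
    return client.object_replication_policies.create_or_update(
        "my-resource-group", "srcaccount", dest_policy.policy_id, policy_properties
    )
```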
:type object_replication_policy_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: None, or the result of cls(response) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/operations/_storage_accounts_operations.py index 19bbf14057b8..78e7c7f39bb0 100644 --- a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/operations/_storage_accounts_operations.py +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/operations/_storage_accounts_operations.py @@ -188,8 +188,8 @@ def begin_create( :type parameters: ~azure.mgmt.storage.v2021_04_01.models.StorageAccountCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the ARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either StorageAccount or the result of cls(response) @@ -930,8 +930,8 @@ def begin_failover( :type account_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the ARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) @@ -1059,8 +1059,8 @@ def begin_restore_blob_ranges( :type parameters: ~azure.mgmt.storage.v2021_04_01.models.BlobRestoreParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: Pass in True if you'd like the ARMPolling polling method, - False for no polling, or your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
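Each `begin_*` docstring above now describes the same three choices for the `polling` keyword. A short sketch of those choices against `begin_failover`, assuming `client` is an authenticated StorageManagementClient and the resource names are placeholders:

```py
from azure.mgmt.core.polling.arm_polling import ARMPolling


def failover_with_polling_options(client):
    # Default: omit `polling` and the returned LROPoller uses ARMPolling.
    poller = client.storage_accounts.begin_failover("my-resource-group", "mystorageaccount")
    poller.result()  # block until the failover long-running operation finishes

    # polling=False: the operation is started, but the poller does not poll it.
    client.storage_accounts.begin_failover(
        "my-resource-group", "mystorageaccount", polling=False
    )

    # Custom strategy: pass an initialized polling object, here ARMPolling with
    # a 30-second polling interval.
    client.storage_accounts.begin_failover(
        "my-resource-group", "mystorageaccount", polling=ARMPolling(timeout=30)
    )
```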
:return: An instance of LROPoller that returns either BlobRestoreStatus or the result of cls(response) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/__init__.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/__init__.py new file mode 100644 index 000000000000..a50d1e1c39a7 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/__init__.py @@ -0,0 +1,19 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._storage_management_client import StorageManagementClient +from ._version import VERSION + +__version__ = VERSION +__all__ = ['StorageManagementClient'] + +try: + from ._patch import patch_sdk # type: ignore + patch_sdk() +except ImportError: + pass diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/_configuration.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/_configuration.py new file mode 100644 index 000000000000..8c02e8f6f624 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/_configuration.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import TYPE_CHECKING + +from azure.core.configuration import Configuration +from azure.core.pipeline import policies +from azure.mgmt.core.policies import ARMHttpLoggingPolicy + +from ._version import VERSION + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any + + from azure.core.credentials import TokenCredential + + +class StorageManagementClientConfiguration(Configuration): + """Configuration for StorageManagementClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials.TokenCredential + :param subscription_id: The ID of the target subscription. + :type subscription_id: str + """ + + def __init__( + self, + credential, # type: "TokenCredential" + subscription_id, # type: str + **kwargs # type: Any + ): + # type: (...) 
-> None + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + if subscription_id is None: + raise ValueError("Parameter 'subscription_id' must not be None.") + super(StorageManagementClientConfiguration, self).__init__(**kwargs) + + self.credential = credential + self.subscription_id = subscription_id + self.api_version = "2021-06-01" + self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) + kwargs.setdefault('sdk_moniker', 'mgmt-storage/{}'.format(VERSION)) + self._configure(**kwargs) + + def _configure( + self, + **kwargs # type: Any + ): + # type: (...) -> None + self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get('authentication_policy') + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/_metadata.json b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/_metadata.json new file mode 100644 index 000000000000..5c2931eb4a70 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/_metadata.json @@ -0,0 +1,121 @@ +{ + "chosen_version": "2021-06-01", + "total_api_version_list": ["2021-06-01"], + "client": { + "name": "StorageManagementClient", + "filename": "_storage_management_client", + "description": "The Azure Storage Management API.", + "base_url": "\u0027https://management.azure.com\u0027", + "custom_base_url": null, + "azure_arm": true, + "has_lro_operations": true, + "client_side_validation": false, + "sync_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials\": [\"TokenCredential\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", \"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"msrest\": [\"Deserializer\", \"Serializer\"], \"azure.mgmt.core\": [\"ARMPipelineClient\"]}, \"local\": {\"._configuration\": [\"StorageManagementClientConfiguration\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}, \"azurecore\": {\"azure.core.pipeline.transport\": [\"HttpRequest\", \"HttpResponse\"]}}}", + "async_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials_async\": [\"AsyncTokenCredential\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", \"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"msrest\": [\"Deserializer\", \"Serializer\"], \"azure.mgmt.core\": [\"AsyncARMPipelineClient\"]}, \"local\": {\"._configuration\": [\"StorageManagementClientConfiguration\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}, \"azurecore\": {\"azure.core.pipeline.transport\": 
[\"AsyncHttpResponse\", \"HttpRequest\"]}}}" + }, + "global_parameters": { + "sync": { + "credential": { + "signature": "credential, # type: \"TokenCredential\"", + "description": "Credential needed for the client to connect to Azure.", + "docstring_type": "~azure.core.credentials.TokenCredential", + "required": true + }, + "subscription_id": { + "signature": "subscription_id, # type: str", + "description": "The ID of the target subscription.", + "docstring_type": "str", + "required": true + } + }, + "async": { + "credential": { + "signature": "credential: \"AsyncTokenCredential\",", + "description": "Credential needed for the client to connect to Azure.", + "docstring_type": "~azure.core.credentials_async.AsyncTokenCredential", + "required": true + }, + "subscription_id": { + "signature": "subscription_id: str,", + "description": "The ID of the target subscription.", + "docstring_type": "str", + "required": true + } + }, + "constant": { + }, + "call": "credential, subscription_id", + "service_client_specific": { + "sync": { + "api_version": { + "signature": "api_version=None, # type: Optional[str]", + "description": "API version to use if no profile is provided, or if missing in profile.", + "docstring_type": "str", + "required": false + }, + "base_url": { + "signature": "base_url=None, # type: Optional[str]", + "description": "Service URL", + "docstring_type": "str", + "required": false + }, + "profile": { + "signature": "profile=KnownProfiles.default, # type: KnownProfiles", + "description": "A profile definition, from KnownProfiles to dict.", + "docstring_type": "azure.profiles.KnownProfiles", + "required": false + } + }, + "async": { + "api_version": { + "signature": "api_version: Optional[str] = None,", + "description": "API version to use if no profile is provided, or if missing in profile.", + "docstring_type": "str", + "required": false + }, + "base_url": { + "signature": "base_url: Optional[str] = None,", + "description": "Service URL", + "docstring_type": "str", + "required": false + }, + "profile": { + "signature": "profile: KnownProfiles = KnownProfiles.default,", + "description": "A profile definition, from KnownProfiles to dict.", + "docstring_type": "azure.profiles.KnownProfiles", + "required": false + } + } + } + }, + "config": { + "credential": true, + "credential_scopes": ["https://management.azure.com/.default"], + "credential_default_policy_type": "BearerTokenCredentialPolicy", + "credential_default_policy_type_has_async_version": true, + "credential_key_header_name": null, + "sync_imports": "{\"regular\": {\"azurecore\": {\"azure.core.configuration\": [\"Configuration\"], \"azure.core.pipeline\": [\"policies\"], \"azure.mgmt.core.policies\": [\"ARMHttpLoggingPolicy\"]}, \"local\": {\"._version\": [\"VERSION\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\"]}}, \"typing\": {\"azurecore\": {\"azure.core.credentials\": [\"TokenCredential\"]}}}", + "async_imports": "{\"regular\": {\"azurecore\": {\"azure.core.configuration\": [\"Configuration\"], \"azure.core.pipeline\": [\"policies\"], \"azure.mgmt.core.policies\": [\"ARMHttpLoggingPolicy\"]}, \"local\": {\".._version\": [\"VERSION\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\"]}}, \"typing\": {\"azurecore\": {\"azure.core.credentials_async\": [\"AsyncTokenCredential\"]}}}" + }, + "operation_groups": { + "operations": "Operations", + "skus": "SkusOperations", + "storage_accounts": "StorageAccountsOperations", + "deleted_accounts": "DeletedAccountsOperations", + "usages": "UsagesOperations", + 
"management_policies": "ManagementPoliciesOperations", + "blob_inventory_policies": "BlobInventoryPoliciesOperations", + "private_endpoint_connections": "PrivateEndpointConnectionsOperations", + "private_link_resources": "PrivateLinkResourcesOperations", + "object_replication_policies": "ObjectReplicationPoliciesOperations", + "encryption_scopes": "EncryptionScopesOperations", + "blob_services": "BlobServicesOperations", + "blob_containers": "BlobContainersOperations", + "file_services": "FileServicesOperations", + "file_shares": "FileSharesOperations", + "queue_services": "QueueServicesOperations", + "queue": "QueueOperations", + "table_services": "TableServicesOperations", + "table": "TableOperations" + } +} \ No newline at end of file diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/_storage_management_client.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/_storage_management_client.py new file mode 100644 index 000000000000..94287daed865 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/_storage_management_client.py @@ -0,0 +1,179 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import TYPE_CHECKING + +from azure.mgmt.core import ARMPipelineClient +from msrest import Deserializer, Serializer + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Optional + + from azure.core.credentials import TokenCredential + from azure.core.pipeline.transport import HttpRequest, HttpResponse + +from ._configuration import StorageManagementClientConfiguration +from .operations import Operations +from .operations import SkusOperations +from .operations import StorageAccountsOperations +from .operations import DeletedAccountsOperations +from .operations import UsagesOperations +from .operations import ManagementPoliciesOperations +from .operations import BlobInventoryPoliciesOperations +from .operations import PrivateEndpointConnectionsOperations +from .operations import PrivateLinkResourcesOperations +from .operations import ObjectReplicationPoliciesOperations +from .operations import EncryptionScopesOperations +from .operations import BlobServicesOperations +from .operations import BlobContainersOperations +from .operations import FileServicesOperations +from .operations import FileSharesOperations +from .operations import QueueServicesOperations +from .operations import QueueOperations +from .operations import TableServicesOperations +from .operations import TableOperations +from . import models + + +class StorageManagementClient(object): + """The Azure Storage Management API. 
+ + :ivar operations: Operations operations + :vartype operations: azure.mgmt.storage.v2021_06_01.operations.Operations + :ivar skus: SkusOperations operations + :vartype skus: azure.mgmt.storage.v2021_06_01.operations.SkusOperations + :ivar storage_accounts: StorageAccountsOperations operations + :vartype storage_accounts: azure.mgmt.storage.v2021_06_01.operations.StorageAccountsOperations + :ivar deleted_accounts: DeletedAccountsOperations operations + :vartype deleted_accounts: azure.mgmt.storage.v2021_06_01.operations.DeletedAccountsOperations + :ivar usages: UsagesOperations operations + :vartype usages: azure.mgmt.storage.v2021_06_01.operations.UsagesOperations + :ivar management_policies: ManagementPoliciesOperations operations + :vartype management_policies: azure.mgmt.storage.v2021_06_01.operations.ManagementPoliciesOperations + :ivar blob_inventory_policies: BlobInventoryPoliciesOperations operations + :vartype blob_inventory_policies: azure.mgmt.storage.v2021_06_01.operations.BlobInventoryPoliciesOperations + :ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations + :vartype private_endpoint_connections: azure.mgmt.storage.v2021_06_01.operations.PrivateEndpointConnectionsOperations + :ivar private_link_resources: PrivateLinkResourcesOperations operations + :vartype private_link_resources: azure.mgmt.storage.v2021_06_01.operations.PrivateLinkResourcesOperations + :ivar object_replication_policies: ObjectReplicationPoliciesOperations operations + :vartype object_replication_policies: azure.mgmt.storage.v2021_06_01.operations.ObjectReplicationPoliciesOperations + :ivar encryption_scopes: EncryptionScopesOperations operations + :vartype encryption_scopes: azure.mgmt.storage.v2021_06_01.operations.EncryptionScopesOperations + :ivar blob_services: BlobServicesOperations operations + :vartype blob_services: azure.mgmt.storage.v2021_06_01.operations.BlobServicesOperations + :ivar blob_containers: BlobContainersOperations operations + :vartype blob_containers: azure.mgmt.storage.v2021_06_01.operations.BlobContainersOperations + :ivar file_services: FileServicesOperations operations + :vartype file_services: azure.mgmt.storage.v2021_06_01.operations.FileServicesOperations + :ivar file_shares: FileSharesOperations operations + :vartype file_shares: azure.mgmt.storage.v2021_06_01.operations.FileSharesOperations + :ivar queue_services: QueueServicesOperations operations + :vartype queue_services: azure.mgmt.storage.v2021_06_01.operations.QueueServicesOperations + :ivar queue: QueueOperations operations + :vartype queue: azure.mgmt.storage.v2021_06_01.operations.QueueOperations + :ivar table_services: TableServicesOperations operations + :vartype table_services: azure.mgmt.storage.v2021_06_01.operations.TableServicesOperations + :ivar table: TableOperations operations + :vartype table: azure.mgmt.storage.v2021_06_01.operations.TableOperations + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials.TokenCredential + :param subscription_id: The ID of the target subscription. + :type subscription_id: str + :param str base_url: Service URL + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + """ + + def __init__( + self, + credential, # type: "TokenCredential" + subscription_id, # type: str + base_url=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) 
-> None + if not base_url: + base_url = 'https://management.azure.com' + self._config = StorageManagementClientConfiguration(credential, subscription_id, **kwargs) + self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._serialize.client_side_validation = False + self._deserialize = Deserializer(client_models) + + self.operations = Operations( + self._client, self._config, self._serialize, self._deserialize) + self.skus = SkusOperations( + self._client, self._config, self._serialize, self._deserialize) + self.storage_accounts = StorageAccountsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.deleted_accounts = DeletedAccountsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.usages = UsagesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.management_policies = ManagementPoliciesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.blob_inventory_policies = BlobInventoryPoliciesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.private_endpoint_connections = PrivateEndpointConnectionsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.private_link_resources = PrivateLinkResourcesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.object_replication_policies = ObjectReplicationPoliciesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.encryption_scopes = EncryptionScopesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.blob_services = BlobServicesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.blob_containers = BlobContainersOperations( + self._client, self._config, self._serialize, self._deserialize) + self.file_services = FileServicesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.file_shares = FileSharesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.queue_services = QueueServicesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.queue = QueueOperations( + self._client, self._config, self._serialize, self._deserialize) + self.table_services = TableServicesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.table = TableOperations( + self._client, self._config, self._serialize, self._deserialize) + + def _send_request(self, http_request, **kwargs): + # type: (HttpRequest, Any) -> HttpResponse + """Runs the network request through the client's chained policies. + + :param http_request: The network request you want to make. Required. + :type http_request: ~azure.core.pipeline.transport.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to True. + :return: The response of your network call. Does not do error handling on your response. 
+ :rtype: ~azure.core.pipeline.transport.HttpResponse + """ + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + http_request.url = self._client.format_url(http_request.url, **path_format_arguments) + stream = kwargs.pop("stream", True) + pipeline_response = self._client._pipeline.run(http_request, stream=stream, **kwargs) + return pipeline_response.http_response + + def close(self): + # type: () -> None + self._client.close() + + def __enter__(self): + # type: () -> StorageManagementClient + self._client.__enter__() + return self + + def __exit__(self, *exc_details): + # type: (Any) -> None + self._client.__exit__(*exc_details) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/_version.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/_version.py new file mode 100644 index 000000000000..232662316d4d --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/_version.py @@ -0,0 +1,9 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +VERSION = "19.0.0" diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/__init__.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/__init__.py new file mode 100644 index 000000000000..9cfe0ace1ba9 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/__init__.py @@ -0,0 +1,10 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._storage_management_client import StorageManagementClient +__all__ = ['StorageManagementClient'] diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/_configuration.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/_configuration.py new file mode 100644 index 000000000000..480fd3067ddf --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/_configuration.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
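The synchronous v2021_06_01 client added above takes a credential and subscription ID and supports use as a context manager. A minimal usage sketch; `DefaultAzureCredential` comes from the separate azure-identity package and the subscription ID placeholder is illustrative:

```py
from azure.identity import DefaultAzureCredential
from azure.mgmt.storage.v2021_06_01 import StorageManagementClient

credential = DefaultAzureCredential()  # any TokenCredential works here
client = StorageManagementClient(credential, "<subscription-id>")

# __enter__/__exit__ delegate to the underlying ARMPipelineClient, so the
# client can be used as a context manager.
with client:
    for account in client.storage_accounts.list():
        print(account.name)
```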
+# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING + +from azure.core.configuration import Configuration +from azure.core.pipeline import policies +from azure.mgmt.core.policies import ARMHttpLoggingPolicy + +from .._version import VERSION + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential + + +class StorageManagementClientConfiguration(Configuration): + """Configuration for StorageManagementClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param subscription_id: The ID of the target subscription. + :type subscription_id: str + """ + + def __init__( + self, + credential: "AsyncTokenCredential", + subscription_id: str, + **kwargs: Any + ) -> None: + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + if subscription_id is None: + raise ValueError("Parameter 'subscription_id' must not be None.") + super(StorageManagementClientConfiguration, self).__init__(**kwargs) + + self.credential = credential + self.subscription_id = subscription_id + self.api_version = "2021-06-01" + self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) + kwargs.setdefault('sdk_moniker', 'mgmt-storage/{}'.format(VERSION)) + self._configure(**kwargs) + + def _configure( + self, + **kwargs: Any + ) -> None: + self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get('authentication_policy') + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/_storage_management_client.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/_storage_management_client.py new file mode 100644 index 000000000000..367b5f44657b --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/_storage_management_client.py @@ -0,0 +1,172 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from typing import Any, Optional, TYPE_CHECKING + +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core import AsyncARMPipelineClient +from msrest import Deserializer, Serializer + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential + +from ._configuration import StorageManagementClientConfiguration +from .operations import Operations +from .operations import SkusOperations +from .operations import StorageAccountsOperations +from .operations import DeletedAccountsOperations +from .operations import UsagesOperations +from .operations import ManagementPoliciesOperations +from .operations import BlobInventoryPoliciesOperations +from .operations import PrivateEndpointConnectionsOperations +from .operations import PrivateLinkResourcesOperations +from .operations import ObjectReplicationPoliciesOperations +from .operations import EncryptionScopesOperations +from .operations import BlobServicesOperations +from .operations import BlobContainersOperations +from .operations import FileServicesOperations +from .operations import FileSharesOperations +from .operations import QueueServicesOperations +from .operations import QueueOperations +from .operations import TableServicesOperations +from .operations import TableOperations +from .. import models + + +class StorageManagementClient(object): + """The Azure Storage Management API. + + :ivar operations: Operations operations + :vartype operations: azure.mgmt.storage.v2021_06_01.aio.operations.Operations + :ivar skus: SkusOperations operations + :vartype skus: azure.mgmt.storage.v2021_06_01.aio.operations.SkusOperations + :ivar storage_accounts: StorageAccountsOperations operations + :vartype storage_accounts: azure.mgmt.storage.v2021_06_01.aio.operations.StorageAccountsOperations + :ivar deleted_accounts: DeletedAccountsOperations operations + :vartype deleted_accounts: azure.mgmt.storage.v2021_06_01.aio.operations.DeletedAccountsOperations + :ivar usages: UsagesOperations operations + :vartype usages: azure.mgmt.storage.v2021_06_01.aio.operations.UsagesOperations + :ivar management_policies: ManagementPoliciesOperations operations + :vartype management_policies: azure.mgmt.storage.v2021_06_01.aio.operations.ManagementPoliciesOperations + :ivar blob_inventory_policies: BlobInventoryPoliciesOperations operations + :vartype blob_inventory_policies: azure.mgmt.storage.v2021_06_01.aio.operations.BlobInventoryPoliciesOperations + :ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations + :vartype private_endpoint_connections: azure.mgmt.storage.v2021_06_01.aio.operations.PrivateEndpointConnectionsOperations + :ivar private_link_resources: PrivateLinkResourcesOperations operations + :vartype private_link_resources: azure.mgmt.storage.v2021_06_01.aio.operations.PrivateLinkResourcesOperations + :ivar object_replication_policies: ObjectReplicationPoliciesOperations operations + :vartype object_replication_policies: azure.mgmt.storage.v2021_06_01.aio.operations.ObjectReplicationPoliciesOperations + :ivar encryption_scopes: EncryptionScopesOperations operations + :vartype encryption_scopes: azure.mgmt.storage.v2021_06_01.aio.operations.EncryptionScopesOperations + :ivar blob_services: BlobServicesOperations operations + :vartype blob_services: azure.mgmt.storage.v2021_06_01.aio.operations.BlobServicesOperations + :ivar 
blob_containers: BlobContainersOperations operations + :vartype blob_containers: azure.mgmt.storage.v2021_06_01.aio.operations.BlobContainersOperations + :ivar file_services: FileServicesOperations operations + :vartype file_services: azure.mgmt.storage.v2021_06_01.aio.operations.FileServicesOperations + :ivar file_shares: FileSharesOperations operations + :vartype file_shares: azure.mgmt.storage.v2021_06_01.aio.operations.FileSharesOperations + :ivar queue_services: QueueServicesOperations operations + :vartype queue_services: azure.mgmt.storage.v2021_06_01.aio.operations.QueueServicesOperations + :ivar queue: QueueOperations operations + :vartype queue: azure.mgmt.storage.v2021_06_01.aio.operations.QueueOperations + :ivar table_services: TableServicesOperations operations + :vartype table_services: azure.mgmt.storage.v2021_06_01.aio.operations.TableServicesOperations + :ivar table: TableOperations operations + :vartype table: azure.mgmt.storage.v2021_06_01.aio.operations.TableOperations + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param subscription_id: The ID of the target subscription. + :type subscription_id: str + :param str base_url: Service URL + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + """ + + def __init__( + self, + credential: "AsyncTokenCredential", + subscription_id: str, + base_url: Optional[str] = None, + **kwargs: Any + ) -> None: + if not base_url: + base_url = 'https://management.azure.com' + self._config = StorageManagementClientConfiguration(credential, subscription_id, **kwargs) + self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._serialize.client_side_validation = False + self._deserialize = Deserializer(client_models) + + self.operations = Operations( + self._client, self._config, self._serialize, self._deserialize) + self.skus = SkusOperations( + self._client, self._config, self._serialize, self._deserialize) + self.storage_accounts = StorageAccountsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.deleted_accounts = DeletedAccountsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.usages = UsagesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.management_policies = ManagementPoliciesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.blob_inventory_policies = BlobInventoryPoliciesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.private_endpoint_connections = PrivateEndpointConnectionsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.private_link_resources = PrivateLinkResourcesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.object_replication_policies = ObjectReplicationPoliciesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.encryption_scopes = EncryptionScopesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.blob_services = BlobServicesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.blob_containers = BlobContainersOperations( + self._client, self._config, 
self._serialize, self._deserialize) + self.file_services = FileServicesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.file_shares = FileSharesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.queue_services = QueueServicesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.queue = QueueOperations( + self._client, self._config, self._serialize, self._deserialize) + self.table_services = TableServicesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.table = TableOperations( + self._client, self._config, self._serialize, self._deserialize) + + async def _send_request(self, http_request: HttpRequest, **kwargs: Any) -> AsyncHttpResponse: + """Runs the network request through the client's chained policies. + + :param http_request: The network request you want to make. Required. + :type http_request: ~azure.core.pipeline.transport.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to True. + :return: The response of your network call. Does not do error handling on your response. + :rtype: ~azure.core.pipeline.transport.AsyncHttpResponse + """ + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + http_request.url = self._client.format_url(http_request.url, **path_format_arguments) + stream = kwargs.pop("stream", True) + pipeline_response = await self._client._pipeline.run(http_request, stream=stream, **kwargs) + return pipeline_response.http_response + + async def close(self) -> None: + await self._client.close() + + async def __aenter__(self) -> "StorageManagementClient": + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details) -> None: + await self._client.__aexit__(*exc_details) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/__init__.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/__init__.py new file mode 100644 index 000000000000..bddcf8c8cb34 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/__init__.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
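The aio client mirrors the synchronous surface with coroutines and async context-manager support. A sketch of the equivalent async usage, again assuming `DefaultAzureCredential` (here from `azure.identity.aio`) and a placeholder subscription ID:

```py
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.storage.v2021_06_01.aio import StorageManagementClient


async def main() -> None:
    credential = DefaultAzureCredential()
    # __aenter__/__aexit__ delegate to the AsyncARMPipelineClient.
    async with StorageManagementClient(credential, "<subscription-id>") as client:
        async for account in client.storage_accounts.list():
            print(account.name)
    await credential.close()


asyncio.run(main())
```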
+# -------------------------------------------------------------------------- + +from ._operations import Operations +from ._skus_operations import SkusOperations +from ._storage_accounts_operations import StorageAccountsOperations +from ._deleted_accounts_operations import DeletedAccountsOperations +from ._usages_operations import UsagesOperations +from ._management_policies_operations import ManagementPoliciesOperations +from ._blob_inventory_policies_operations import BlobInventoryPoliciesOperations +from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations +from ._private_link_resources_operations import PrivateLinkResourcesOperations +from ._object_replication_policies_operations import ObjectReplicationPoliciesOperations +from ._encryption_scopes_operations import EncryptionScopesOperations +from ._blob_services_operations import BlobServicesOperations +from ._blob_containers_operations import BlobContainersOperations +from ._file_services_operations import FileServicesOperations +from ._file_shares_operations import FileSharesOperations +from ._queue_services_operations import QueueServicesOperations +from ._queue_operations import QueueOperations +from ._table_services_operations import TableServicesOperations +from ._table_operations import TableOperations + +__all__ = [ + 'Operations', + 'SkusOperations', + 'StorageAccountsOperations', + 'DeletedAccountsOperations', + 'UsagesOperations', + 'ManagementPoliciesOperations', + 'BlobInventoryPoliciesOperations', + 'PrivateEndpointConnectionsOperations', + 'PrivateLinkResourcesOperations', + 'ObjectReplicationPoliciesOperations', + 'EncryptionScopesOperations', + 'BlobServicesOperations', + 'BlobContainersOperations', + 'FileServicesOperations', + 'FileSharesOperations', + 'QueueServicesOperations', + 'QueueOperations', + 'TableServicesOperations', + 'TableOperations', +] diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_blob_containers_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_blob_containers_operations.py new file mode 100644 index 000000000000..172f9af84cdb --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_blob_containers_operations.py @@ -0,0 +1,1208 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... 
import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class BlobContainersOperations: + """BlobContainersOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + resource_group_name: str, + account_name: str, + maxpagesize: Optional[str] = None, + filter: Optional[str] = None, + include: Optional[Union[str, "_models.ListContainersInclude"]] = None, + **kwargs: Any + ) -> AsyncIterable["_models.ListContainerItems"]: + """Lists all containers and does not support a prefix like data plane. Also SRP today does not + return continuation token. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param maxpagesize: Optional. Specified maximum number of containers that can be included in + the list. + :type maxpagesize: str + :param filter: Optional. When specified, only container names starting with the filter will be + listed. + :type filter: str + :param include: Optional, used to include the properties for soft deleted blob containers. 
+ :type include: str or ~azure.mgmt.storage.v2021_06_01.models.ListContainersInclude + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ListContainerItems or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.v2021_06_01.models.ListContainerItems] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListContainerItems"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'str') + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + if include is not None: + query_parameters['$include'] = self._serialize.query("include", include, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('ListContainerItems', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers'} # type: ignore + + async def create( + self, + resource_group_name: str, + account_name: str, + container_name: str, + blob_container: "_models.BlobContainer", + **kwargs: Any + ) -> "_models.BlobContainer": + """Creates a new container under the specified account as described by request body. The container + resource includes metadata and properties for that container. 
It does not include a list of the + blobs contained by the container. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. + :type container_name: str + :param blob_container: Properties of the blob container to create. + :type blob_container: ~azure.mgmt.storage.v2021_06_01.models.BlobContainer + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BlobContainer, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.BlobContainer + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobContainer"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(blob_container, 'BlobContainer') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('BlobContainer', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('BlobContainer', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + 
create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}'} # type: ignore + + async def update( + self, + resource_group_name: str, + account_name: str, + container_name: str, + blob_container: "_models.BlobContainer", + **kwargs: Any + ) -> "_models.BlobContainer": + """Updates container properties as specified in request body. Properties not mentioned in the + request will be unchanged. Update fails if the specified container doesn't already exist. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. + :type container_name: str + :param blob_container: Properties to update for the blob container. + :type blob_container: ~azure.mgmt.storage.v2021_06_01.models.BlobContainer + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BlobContainer, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.BlobContainer + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobContainer"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(blob_container, 'BlobContainer') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = 
pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('BlobContainer', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + account_name: str, + container_name: str, + **kwargs: Any + ) -> "_models.BlobContainer": + """Gets properties of a specified container. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. + :type container_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BlobContainer, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.BlobContainer + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobContainer"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('BlobContainer', pipeline_response) + + 
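+ # When a custom ``cls`` callback is supplied via ``**kwargs``, it receives the raw pipeline
+ # response, the deserialized ``BlobContainer`` and the (empty) response headers, and its
+ # return value is what the caller gets back instead of the model returned below.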
if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}'} # type: ignore + + async def delete( + self, + resource_group_name: str, + account_name: str, + container_name: str, + **kwargs: Any + ) -> None: + """Deletes specified container under its account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. + :type container_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}'} # type: ignore + + async def set_legal_hold( + self, + resource_group_name: str, + account_name: str, + container_name: str, + legal_hold: "_models.LegalHold", + **kwargs: Any + ) -> "_models.LegalHold": + """Sets legal hold tags. Setting the same tag results in an idempotent operation. 
SetLegalHold + follows an append pattern and does not clear out the existing tags that are not specified in + the request. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. + :type container_name: str + :param legal_hold: The LegalHold property that will be set to a blob container. + :type legal_hold: ~azure.mgmt.storage.v2021_06_01.models.LegalHold + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LegalHold, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.LegalHold + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.LegalHold"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.set_legal_hold.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(legal_hold, 'LegalHold') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('LegalHold', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + set_legal_hold.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/setLegalHold'} # type: ignore + + async def clear_legal_hold( + self, + resource_group_name: str, + account_name: str, + container_name: str, + legal_hold: "_models.LegalHold", + **kwargs: Any + ) -> "_models.LegalHold": + """Clears legal hold tags. Clearing the same or non-existent tag results in an idempotent + operation. ClearLegalHold clears out only the specified tags in the request. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. + :type container_name: str + :param legal_hold: The LegalHold property that will be clear from a blob container. + :type legal_hold: ~azure.mgmt.storage.v2021_06_01.models.LegalHold + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LegalHold, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.LegalHold + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.LegalHold"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.clear_legal_hold.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(legal_hold, 'LegalHold') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if 
response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('LegalHold', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + clear_legal_hold.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/clearLegalHold'} # type: ignore + + async def create_or_update_immutability_policy( + self, + resource_group_name: str, + account_name: str, + container_name: str, + if_match: Optional[str] = None, + parameters: Optional["_models.ImmutabilityPolicy"] = None, + **kwargs: Any + ) -> "_models.ImmutabilityPolicy": + """Creates or updates an unlocked immutability policy. ETag in If-Match is honored if given but + not required for this operation. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. + :type container_name: str + :param if_match: The entity state (ETag) version of the immutability policy to update. A value + of "*" can be used to apply the operation only if the immutability policy already exists. If + omitted, this operation will always be applied. + :type if_match: str + :param parameters: The ImmutabilityPolicy Properties that will be created or updated to a blob + container. 
+ :type parameters: ~azure.mgmt.storage.v2021_06_01.models.ImmutabilityPolicy + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ImmutabilityPolicy, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.ImmutabilityPolicy + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ImmutabilityPolicy"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + immutability_policy_name = "default" + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update_immutability_policy.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3), + 'immutabilityPolicyName': self._serialize.url("immutability_policy_name", immutability_policy_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if parameters is not None: + body_content = self._serialize.body(parameters, 'ImmutabilityPolicy') + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('ImmutabilityPolicy', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + create_or_update_immutability_policy.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/{immutabilityPolicyName}'} # type: ignore + + async def get_immutability_policy( + self, + resource_group_name: str, + account_name: str, + container_name: str, + if_match: Optional[str] = None, + **kwargs: Any + ) -> "_models.ImmutabilityPolicy": + """Gets the existing 
immutability policy along with the corresponding ETag in response headers and + body. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. + :type container_name: str + :param if_match: The entity state (ETag) version of the immutability policy to update. A value + of "*" can be used to apply the operation only if the immutability policy already exists. If + omitted, this operation will always be applied. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ImmutabilityPolicy, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.ImmutabilityPolicy + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ImmutabilityPolicy"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + immutability_policy_name = "default" + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.get_immutability_policy.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3), + 'immutabilityPolicyName': self._serialize.url("immutability_policy_name", immutability_policy_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('ImmutabilityPolicy', pipeline_response) + + if cls: + return cls(pipeline_response, 
deserialized, response_headers) + + return deserialized + get_immutability_policy.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/{immutabilityPolicyName}'} # type: ignore + + async def delete_immutability_policy( + self, + resource_group_name: str, + account_name: str, + container_name: str, + if_match: str, + **kwargs: Any + ) -> "_models.ImmutabilityPolicy": + """Aborts an unlocked immutability policy. The response of delete has + immutabilityPeriodSinceCreationInDays set to 0. ETag in If-Match is required for this + operation. Deleting a locked immutability policy is not allowed, the only way is to delete the + container after deleting all expired blobs inside the policy locked container. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. + :type container_name: str + :param if_match: The entity state (ETag) version of the immutability policy to update. A value + of "*" can be used to apply the operation only if the immutability policy already exists. If + omitted, this operation will always be applied. 
+ :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ImmutabilityPolicy, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.ImmutabilityPolicy + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ImmutabilityPolicy"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + immutability_policy_name = "default" + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.delete_immutability_policy.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3), + 'immutabilityPolicyName': self._serialize.url("immutability_policy_name", immutability_policy_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('ImmutabilityPolicy', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + delete_immutability_policy.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/{immutabilityPolicyName}'} # type: ignore + + async def lock_immutability_policy( + self, + resource_group_name: str, + account_name: str, + container_name: str, + if_match: str, + **kwargs: Any + ) -> "_models.ImmutabilityPolicy": + """Sets the ImmutabilityPolicy to Locked state. The only action allowed on a Locked policy is + ExtendImmutabilityPolicy action. ETag in If-Match is required for this operation. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. 
+ :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. + :type container_name: str + :param if_match: The entity state (ETag) version of the immutability policy to update. A value + of "*" can be used to apply the operation only if the immutability policy already exists. If + omitted, this operation will always be applied. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ImmutabilityPolicy, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.ImmutabilityPolicy + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ImmutabilityPolicy"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.lock_immutability_policy.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('ImmutabilityPolicy', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + lock_immutability_policy.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/default/lock'} # type: ignore + + async def extend_immutability_policy( + self, + resource_group_name: str, + account_name: str, + container_name: str, + if_match: str, + parameters: Optional["_models.ImmutabilityPolicy"] = None, + **kwargs: Any + ) -> "_models.ImmutabilityPolicy": + """Extends the immutabilityPeriodSinceCreationInDays of 
a locked immutabilityPolicy. The only + action allowed on a Locked policy will be this action. ETag in If-Match is required for this + operation. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. + :type container_name: str + :param if_match: The entity state (ETag) version of the immutability policy to update. A value + of "*" can be used to apply the operation only if the immutability policy already exists. If + omitted, this operation will always be applied. + :type if_match: str + :param parameters: The ImmutabilityPolicy Properties that will be extended for a blob + container. + :type parameters: ~azure.mgmt.storage.v2021_06_01.models.ImmutabilityPolicy + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ImmutabilityPolicy, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.ImmutabilityPolicy + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ImmutabilityPolicy"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.extend_immutability_policy.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if parameters is not None: + body_content = self._serialize.body(parameters, 'ImmutabilityPolicy') + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + 
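+ # Unwrap the HTTP response from the pipeline result; any status code other than 200 is mapped
+ # to a typed Azure error where possible, or raised as HttpResponseError with ARM error formatting.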
response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('ImmutabilityPolicy', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + extend_immutability_policy.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/default/extend'} # type: ignore + + async def lease( + self, + resource_group_name: str, + account_name: str, + container_name: str, + parameters: Optional["_models.LeaseContainerRequest"] = None, + **kwargs: Any + ) -> "_models.LeaseContainerResponse": + """The Lease Container operation establishes and manages a lock on a container for delete + operations. The lock duration can be 15 to 60 seconds, or can be infinite. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. + :type container_name: str + :param parameters: Lease Container request body. 
+ :type parameters: ~azure.mgmt.storage.v2021_06_01.models.LeaseContainerRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LeaseContainerResponse, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.LeaseContainerResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.LeaseContainerResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.lease.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if parameters is not None: + body_content = self._serialize.body(parameters, 'LeaseContainerRequest') + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('LeaseContainerResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + lease.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/lease'} # type: ignore + + async def _object_level_worm_initial( + self, + resource_group_name: str, + account_name: str, + container_name: str, + **kwargs: Any + ) -> None: + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self._object_level_worm_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _object_level_worm_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/migrate'} # type: ignore + + async def begin_object_level_worm( + self, + resource_group_name: str, + account_name: str, + container_name: str, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """This operation migrates a blob container from container level WORM to object level immutability + enabled container. Prerequisites require a container level immutability policy either in locked + or unlocked state, Account level versioning must be enabled and there should be no Legal hold + on the container. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. + :type container_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._object_level_worm_initial( + resource_group_name=resource_group_name, + account_name=account_name, + container_name=container_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_object_level_worm.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/migrate'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_blob_inventory_policies_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_blob_inventory_policies_operations.py new file mode 100644 index 000000000000..b7a5ce46e6fc --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_blob_inventory_policies_operations.py @@ -0,0 +1,326 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class BlobInventoryPoliciesOperations: + """BlobInventoryPoliciesOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def get( + self, + resource_group_name: str, + account_name: str, + blob_inventory_policy_name: Union[str, "_models.BlobInventoryPolicyName"], + **kwargs: Any + ) -> "_models.BlobInventoryPolicy": + """Gets the blob inventory policy associated with the specified storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param blob_inventory_policy_name: The name of the storage account blob inventory policy. It + should always be 'default'. 
+ :type blob_inventory_policy_name: str or ~azure.mgmt.storage.v2021_06_01.models.BlobInventoryPolicyName + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BlobInventoryPolicy, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.BlobInventoryPolicy + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobInventoryPolicy"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'blobInventoryPolicyName': self._serialize.url("blob_inventory_policy_name", blob_inventory_policy_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('BlobInventoryPolicy', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/inventoryPolicies/{blobInventoryPolicyName}'} # type: ignore + + async def create_or_update( + self, + resource_group_name: str, + account_name: str, + blob_inventory_policy_name: Union[str, "_models.BlobInventoryPolicyName"], + properties: "_models.BlobInventoryPolicy", + **kwargs: Any + ) -> "_models.BlobInventoryPolicy": + """Sets the blob inventory policy to the specified storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param blob_inventory_policy_name: The name of the storage account blob inventory policy. It + should always be 'default'. 
+ :type blob_inventory_policy_name: str or ~azure.mgmt.storage.v2021_06_01.models.BlobInventoryPolicyName + :param properties: The blob inventory policy set to a storage account. + :type properties: ~azure.mgmt.storage.v2021_06_01.models.BlobInventoryPolicy + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BlobInventoryPolicy, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.BlobInventoryPolicy + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobInventoryPolicy"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'blobInventoryPolicyName': self._serialize.url("blob_inventory_policy_name", blob_inventory_policy_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(properties, 'BlobInventoryPolicy') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('BlobInventoryPolicy', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/inventoryPolicies/{blobInventoryPolicyName}'} # type: ignore + + async def delete( + self, + resource_group_name: str, + account_name: str, + blob_inventory_policy_name: Union[str, "_models.BlobInventoryPolicyName"], + **kwargs: Any + ) -> None: + """Deletes the blob inventory policy associated with the specified storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param blob_inventory_policy_name: The name of the storage account blob inventory policy. It + should always be 'default'. + :type blob_inventory_policy_name: str or ~azure.mgmt.storage.v2021_06_01.models.BlobInventoryPolicyName + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'blobInventoryPolicyName': self._serialize.url("blob_inventory_policy_name", blob_inventory_policy_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/inventoryPolicies/{blobInventoryPolicyName}'} # type: ignore + + def list( + self, + resource_group_name: str, + account_name: str, + **kwargs: Any + ) -> AsyncIterable["_models.ListBlobInventoryPolicy"]: + """Gets the blob inventory policy associated with the specified storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. 
+ :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ListBlobInventoryPolicy or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.v2021_06_01.models.ListBlobInventoryPolicy] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListBlobInventoryPolicy"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('ListBlobInventoryPolicy', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/inventoryPolicies'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_blob_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_blob_services_operations.py new file mode 100644 index 000000000000..f0fbc55dbce3 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_blob_services_operations.py @@ -0,0 +1,256 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
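+#
+# Usage sketch (illustrative only): the operation group in this module is not meant to be
+# instantiated directly; it is reached through the service client. The example below assumes
+# the async ``StorageManagementClient`` from ``azure.mgmt.storage.aio`` exposes it as
+# ``blob_services`` (attribute name inferred from the operation group naming convention):
+#
+#     from azure.identity.aio import DefaultAzureCredential
+#     from azure.mgmt.storage.aio import StorageManagementClient
+#
+#     async def show_blob_service_items(resource_group: str, account: str) -> None:
+#         async with StorageManagementClient(DefaultAzureCredential(), "<subscription-id>") as client:
+#             # ``list`` pages through BlobServiceItems and yields the individual
+#             # BlobServiceProperties resources.
+#             async for item in client.blob_services.list(resource_group, account):
+#                 print(item.id)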
+# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class BlobServicesOperations: + """BlobServicesOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + resource_group_name: str, + account_name: str, + **kwargs: Any + ) -> AsyncIterable["_models.BlobServiceItems"]: + """List blob services of storage account. It returns a collection of one object named default. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. 
+ :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either BlobServiceItems or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.v2021_06_01.models.BlobServiceItems] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobServiceItems"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('BlobServiceItems', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices'} # type: ignore + + async def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: "_models.BlobServiceProperties", + **kwargs: Any + ) -> "_models.BlobServiceProperties": + """Sets the properties of a storage account’s Blob service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. 
+ :type account_name: str + :param parameters: The properties of a storage account’s Blob service, including properties for + Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. + :type parameters: ~azure.mgmt.storage.v2021_06_01.models.BlobServiceProperties + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BlobServiceProperties, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.BlobServiceProperties + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobServiceProperties"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + blob_services_name = "default" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.set_service_properties.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'BlobServicesName': self._serialize.url("blob_services_name", blob_services_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'BlobServiceProperties') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('BlobServiceProperties', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + set_service_properties.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/{BlobServicesName}'} # type: ignore + + async def get_service_properties( + self, + resource_group_name: str, + account_name: str, + **kwargs: Any + ) -> "_models.BlobServiceProperties": + """Gets the properties of a storage account’s Blob service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BlobServiceProperties, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.BlobServiceProperties + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobServiceProperties"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + blob_services_name = "default" + accept = "application/json" + + # Construct URL + url = self.get_service_properties.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'BlobServicesName': self._serialize.url("blob_services_name", blob_services_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('BlobServiceProperties', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_service_properties.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/{BlobServicesName}'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_deleted_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_deleted_accounts_operations.py new file mode 100644 index 000000000000..49f4da0cb024 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_deleted_accounts_operations.py @@ -0,0 +1,168 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
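+#
+# Usage sketch (illustrative only; ``deleted_accounts`` is the attribute name assumed for this
+# operation group on the async ``StorageManagementClient`` from ``azure.mgmt.storage.aio``):
+#
+#     from azure.identity.aio import DefaultAzureCredential
+#     from azure.mgmt.storage.aio import StorageManagementClient
+#
+#     async def show_deleted_accounts() -> None:
+#         async with StorageManagementClient(DefaultAzureCredential(), "<subscription-id>") as client:
+#             # ``list`` yields the DeletedAccount resources in the subscription, following next links.
+#             async for account in client.deleted_accounts.list():
+#                 print(account.name)
+#             # A single deleted account can also be fetched by name and location:
+#             detail = await client.deleted_accounts.get("<deleted-account-name>", "<location>")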
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class DeletedAccountsOperations: + """DeletedAccountsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs: Any + ) -> AsyncIterable["_models.DeletedAccountListResult"]: + """Lists deleted accounts under the subscription. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either DeletedAccountListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.v2021_06_01.models.DeletedAccountListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.DeletedAccountListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('DeletedAccountListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = 
prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Storage/deletedAccounts'} # type: ignore + + async def get( + self, + deleted_account_name: str, + location: str, + **kwargs: Any + ) -> "_models.DeletedAccount": + """Get properties of specified deleted account resource. + + :param deleted_account_name: Name of the deleted storage account. + :type deleted_account_name: str + :param location: The location of the deleted storage account. + :type location: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DeletedAccount, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.DeletedAccount + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.DeletedAccount"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'deletedAccountName': self._serialize.url("deleted_account_name", deleted_account_name, 'str', max_length=24, min_length=3), + 'location': self._serialize.url("location", location, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('DeletedAccount', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Storage/locations/{location}/deletedAccounts/{deletedAccountName}'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_encryption_scopes_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_encryption_scopes_operations.py new file mode 100644 index 000000000000..52d2223b17f0 --- /dev/null +++ 
b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_encryption_scopes_operations.py @@ -0,0 +1,349 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class EncryptionScopesOperations: + """EncryptionScopesOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def put( + self, + resource_group_name: str, + account_name: str, + encryption_scope_name: str, + encryption_scope: "_models.EncryptionScope", + **kwargs: Any + ) -> "_models.EncryptionScope": + """Synchronously creates or updates an encryption scope under the specified storage account. If an + encryption scope is already created and a subsequent request is issued with different + properties, the encryption scope properties will be updated per the specified request. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param encryption_scope_name: The name of the encryption scope within the specified storage + account. Encryption scope names must be between 3 and 63 characters in length and use numbers, + lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and + followed by a letter or number. + :type encryption_scope_name: str + :param encryption_scope: Encryption scope properties to be used for the create or update. 
+ :type encryption_scope: ~azure.mgmt.storage.v2021_06_01.models.EncryptionScope + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EncryptionScope, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.EncryptionScope + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.EncryptionScope"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.put.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'encryptionScopeName': self._serialize.url("encryption_scope_name", encryption_scope_name, 'str', max_length=63, min_length=3), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(encryption_scope, 'EncryptionScope') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('EncryptionScope', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('EncryptionScope', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + put.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/encryptionScopes/{encryptionScopeName}'} # type: ignore + + async def patch( + self, + resource_group_name: str, + account_name: str, + encryption_scope_name: str, + encryption_scope: "_models.EncryptionScope", + **kwargs: Any + ) -> "_models.EncryptionScope": + """Update encryption scope properties as specified in the request body. Update fails if the + specified encryption scope does not already exist. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param encryption_scope_name: The name of the encryption scope within the specified storage + account. Encryption scope names must be between 3 and 63 characters in length and use numbers, + lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and + followed by a letter or number. + :type encryption_scope_name: str + :param encryption_scope: Encryption scope properties to be used for the update. + :type encryption_scope: ~azure.mgmt.storage.v2021_06_01.models.EncryptionScope + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EncryptionScope, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.EncryptionScope + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.EncryptionScope"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.patch.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'encryptionScopeName': self._serialize.url("encryption_scope_name", encryption_scope_name, 'str', max_length=63, min_length=3), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(encryption_scope, 'EncryptionScope') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('EncryptionScope', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + patch.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/encryptionScopes/{encryptionScopeName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + account_name: str, + encryption_scope_name: str, + **kwargs: Any + ) -> "_models.EncryptionScope": + """Returns the properties for the specified encryption scope. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param encryption_scope_name: The name of the encryption scope within the specified storage + account. Encryption scope names must be between 3 and 63 characters in length and use numbers, + lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and + followed by a letter or number. + :type encryption_scope_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EncryptionScope, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.EncryptionScope + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.EncryptionScope"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'encryptionScopeName': self._serialize.url("encryption_scope_name", encryption_scope_name, 'str', max_length=63, min_length=3), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('EncryptionScope', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/encryptionScopes/{encryptionScopeName}'} # type: ignore + + def list( + self, + resource_group_name: str, + account_name: str, + **kwargs: Any + ) -> AsyncIterable["_models.EncryptionScopeListResult"]: + """Lists all the encryption scopes available under the specified storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either EncryptionScopeListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.v2021_06_01.models.EncryptionScopeListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.EncryptionScopeListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('EncryptionScopeListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/encryptionScopes'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_file_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_file_services_operations.py new file mode 100644 index 000000000000..ddd8fb80b703 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_file_services_operations.py @@ -0,0 +1,242 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class FileServicesOperations: + """FileServicesOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def list( + self, + resource_group_name: str, + account_name: str, + **kwargs: Any + ) -> "_models.FileServiceItems": + """List all file services in storage accounts. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. 
+ :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FileServiceItems, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.FileServiceItems + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.FileServiceItems"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('FileServiceItems', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices'} # type: ignore + + async def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: "_models.FileServiceProperties", + **kwargs: Any + ) -> "_models.FileServiceProperties": + """Sets the properties of file services in storage accounts, including CORS (Cross-Origin Resource + Sharing) rules. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param parameters: The properties of file services in storage accounts, including CORS + (Cross-Origin Resource Sharing) rules. 
+ :type parameters: ~azure.mgmt.storage.v2021_06_01.models.FileServiceProperties + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FileServiceProperties, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.FileServiceProperties + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.FileServiceProperties"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + file_services_name = "default" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.set_service_properties.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'FileServicesName': self._serialize.url("file_services_name", file_services_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'FileServiceProperties') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('FileServiceProperties', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + set_service_properties.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/{FileServicesName}'} # type: ignore + + async def get_service_properties( + self, + resource_group_name: str, + account_name: str, + **kwargs: Any + ) -> "_models.FileServiceProperties": + """Gets the properties of file services in storage accounts, including CORS (Cross-Origin Resource + Sharing) rules. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FileServiceProperties, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.FileServiceProperties + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.FileServiceProperties"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + file_services_name = "default" + accept = "application/json" + + # Construct URL + url = self.get_service_properties.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'FileServicesName': self._serialize.url("file_services_name", file_services_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('FileServiceProperties', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_service_properties.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/{FileServicesName}'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_file_shares_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_file_shares_operations.py new file mode 100644 index 000000000000..ac5907431bc4 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_file_shares_operations.py @@ -0,0 +1,628 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
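+#
+# Usage sketch (illustrative only; ``file_shares`` is the attribute name assumed for this
+# operation group on the async ``StorageManagementClient`` from ``azure.mgmt.storage.aio``):
+#
+#     from azure.identity.aio import DefaultAzureCredential
+#     from azure.mgmt.storage.aio import StorageManagementClient
+#
+#     async def list_shares(resource_group: str, account: str) -> None:
+#         async with StorageManagementClient(DefaultAzureCredential(), "<subscription-id>") as client:
+#             # ``expand="deleted"`` also surfaces soft-deleted shares; ``filter`` matches a name prefix.
+#             async for share in client.file_shares.list(resource_group, account, expand="deleted"):
+#                 print(share.name)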
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class FileSharesOperations: + """FileSharesOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + resource_group_name: str, + account_name: str, + maxpagesize: Optional[str] = None, + filter: Optional[str] = None, + expand: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable["_models.FileShareItems"]: + """Lists all shares. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param maxpagesize: Optional. Specified maximum number of shares that can be included in the + list. + :type maxpagesize: str + :param filter: Optional. When specified, only share names starting with the filter will be + listed. + :type filter: str + :param expand: Optional, used to expand the properties within share's properties. Valid values + are: deleted, snapshots. Should be passed as a string with delimiter ','. 
+ :type expand: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either FileShareItems or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.v2021_06_01.models.FileShareItems] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.FileShareItems"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'str') + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + if expand is not None: + query_parameters['$expand'] = self._serialize.query("expand", expand, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('FileShareItems', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares'} # type: ignore + + async def create( + self, + resource_group_name: str, + account_name: str, + share_name: str, + file_share: "_models.FileShare", + expand: Optional[str] = None, + **kwargs: Any + ) -> "_models.FileShare": + """Creates a new share under the specified account as described by request body. 
The share + resource includes metadata and properties for that share. It does not include a list of the + files contained by the share. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. + :type share_name: str + :param file_share: Properties of the file share to create. + :type file_share: ~azure.mgmt.storage.v2021_06_01.models.FileShare + :param expand: Optional, used to expand the properties within share's properties. Valid values + are: snapshots. Should be passed as a string with delimiter ','. + :type expand: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FileShare, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.FileShare + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.FileShare"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'shareName': self._serialize.url("share_name", share_name, 'str', max_length=63, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if expand is not None: + query_parameters['$expand'] = self._serialize.query("expand", expand, 'str') + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(file_share, 'FileShare') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, 
response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('FileShare', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('FileShare', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares/{shareName}'} # type: ignore + + async def update( + self, + resource_group_name: str, + account_name: str, + share_name: str, + file_share: "_models.FileShare", + **kwargs: Any + ) -> "_models.FileShare": + """Updates share properties as specified in request body. Properties not mentioned in the request + will not be changed. Update fails if the specified share does not already exist. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. + :type share_name: str + :param file_share: Properties to update for the file share. + :type file_share: ~azure.mgmt.storage.v2021_06_01.models.FileShare + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FileShare, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.FileShare + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.FileShare"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'shareName': self._serialize.url("share_name", share_name, 'str', max_length=63, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = 
self._serialize.body(file_share, 'FileShare') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('FileShare', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares/{shareName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + account_name: str, + share_name: str, + expand: Optional[str] = None, + x_ms_snapshot: Optional[str] = None, + **kwargs: Any + ) -> "_models.FileShare": + """Gets properties of a specified share. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. + :type share_name: str + :param expand: Optional, used to expand the properties within share's properties. Valid values + are: stats. Should be passed as a string with delimiter ','. + :type expand: str + :param x_ms_snapshot: Optional, used to retrieve properties of a snapshot. 
+ :type x_ms_snapshot: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FileShare, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.FileShare + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.FileShare"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'shareName': self._serialize.url("share_name", share_name, 'str', max_length=63, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + if expand is not None: + query_parameters['$expand'] = self._serialize.query("expand", expand, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if x_ms_snapshot is not None: + header_parameters['x-ms-snapshot'] = self._serialize.header("x_ms_snapshot", x_ms_snapshot, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('FileShare', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares/{shareName}'} # type: ignore + + async def delete( + self, + resource_group_name: str, + account_name: str, + share_name: str, + x_ms_snapshot: Optional[str] = None, + include: Optional[str] = None, + **kwargs: Any + ) -> None: + """Deletes specified share under its account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. 
Every dash (-) character must be immediately preceded and followed by a letter
+ or number.
+ :type share_name: str
+ :param x_ms_snapshot: Optional, used to delete a snapshot.
+ :type x_ms_snapshot: str
+ :param include: Optional. Valid values are: snapshots, leased-snapshots, none. The default
+ value is snapshots. For 'snapshots', the file share is deleted including all of its file share
+ snapshots. If the file share contains leased-snapshots, the deletion fails. For
+ 'leased-snapshots', the file share is deleted including all of its file share snapshots
+ (leased/unleased). For 'none', the file share is deleted if it has no share snapshots. If the
+ file share contains any snapshots (leased or unleased), the deletion fails.
+ :type include: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-06-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
+ 'shareName': self._serialize.url("share_name", share_name, 'str', max_length=63, min_length=3),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if include is not None:
+ query_parameters['$include'] = self._serialize.query("include", include, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ if x_ms_snapshot is not None:
+ header_parameters['x-ms-snapshot'] = self._serialize.header("x_ms_snapshot", x_ms_snapshot, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares/{shareName}'} # type: ignore
+
+ async def restore(
+ self,
+ resource_group_name: str,
+ account_name: str,
+ share_name: str,
+ deleted_share: "_models.DeletedShare",
+ **kwargs: Any
+ ) -> None:
+ """Restores a file share within the valid retention days if share soft delete is enabled.
+ + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. + :type share_name: str + :param deleted_share: + :type deleted_share: ~azure.mgmt.storage.v2021_06_01.models.DeletedShare + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.restore.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'shareName': self._serialize.url("share_name", share_name, 'str', max_length=63, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(deleted_share, 'DeletedShare') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + restore.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares/{shareName}/restore'} # type: ignore + + async def lease( + self, + resource_group_name: str, + account_name: str, + share_name: str, + x_ms_snapshot: Optional[str] = None, + parameters: 
Optional["_models.LeaseShareRequest"] = None, + **kwargs: Any + ) -> "_models.LeaseShareResponse": + """The Lease Share operation establishes and manages a lock on a share for delete operations. The + lock duration can be 15 to 60 seconds, or can be infinite. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. + :type share_name: str + :param x_ms_snapshot: Optional. Specify the snapshot time to lease a snapshot. + :type x_ms_snapshot: str + :param parameters: Lease Share request body. + :type parameters: ~azure.mgmt.storage.v2021_06_01.models.LeaseShareRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LeaseShareResponse, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.LeaseShareResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.LeaseShareResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.lease.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'shareName': self._serialize.url("share_name", share_name, 'str', max_length=63, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if x_ms_snapshot is not None: + header_parameters['x-ms-snapshot'] = self._serialize.header("x_ms_snapshot", x_ms_snapshot, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if parameters is not None: + body_content = self._serialize.body(parameters, 'LeaseShareRequest') + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('LeaseShareResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + lease.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares/{shareName}/lease'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_management_policies_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_management_policies_operations.py new file mode 100644 index 000000000000..03fd46c48440 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_management_policies_operations.py @@ -0,0 +1,242 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class ManagementPoliciesOperations: + """ManagementPoliciesOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def get( + self, + resource_group_name: str, + account_name: str, + management_policy_name: Union[str, "_models.ManagementPolicyName"], + **kwargs: Any + ) -> "_models.ManagementPolicy": + """Gets the managementpolicy associated with the specified storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param management_policy_name: The name of the Storage Account Management Policy. It should + always be 'default'. + :type management_policy_name: str or ~azure.mgmt.storage.v2021_06_01.models.ManagementPolicyName + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ManagementPolicy, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.ManagementPolicy + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagementPolicy"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'managementPolicyName': self._serialize.url("management_policy_name", management_policy_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ManagementPolicy', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/managementPolicies/{managementPolicyName}'} # type: ignore + + async def create_or_update( + self, + resource_group_name: str, + account_name: str, + management_policy_name: Union[str, "_models.ManagementPolicyName"], + properties: "_models.ManagementPolicy", + **kwargs: Any + ) -> "_models.ManagementPolicy": + """Sets the managementpolicy to the specified storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. 
+ :type account_name: str + :param management_policy_name: The name of the Storage Account Management Policy. It should + always be 'default'. + :type management_policy_name: str or ~azure.mgmt.storage.v2021_06_01.models.ManagementPolicyName + :param properties: The ManagementPolicy set to a storage account. + :type properties: ~azure.mgmt.storage.v2021_06_01.models.ManagementPolicy + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ManagementPolicy, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.ManagementPolicy + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagementPolicy"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'managementPolicyName': self._serialize.url("management_policy_name", management_policy_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(properties, 'ManagementPolicy') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ManagementPolicy', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/managementPolicies/{managementPolicyName}'} # type: ignore + + async def delete( + self, + resource_group_name: str, + account_name: str, + management_policy_name: Union[str, "_models.ManagementPolicyName"], + **kwargs: Any + ) -> None: + """Deletes the managementpolicy associated with the specified storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param management_policy_name: The name of the Storage Account Management Policy. It should + always be 'default'. + :type management_policy_name: str or ~azure.mgmt.storage.v2021_06_01.models.ManagementPolicyName + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'managementPolicyName': self._serialize.url("management_policy_name", management_policy_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/managementPolicies/{managementPolicyName}'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_object_replication_policies_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_object_replication_policies_operations.py new file mode 100644 index 000000000000..506bafc80e94 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_object_replication_policies_operations.py @@ -0,0 +1,333 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
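The file shares and management policies operation groups above follow the same calling pattern. A short sketch that builds on the hypothetical `client`, `resource_group`, and `account` from the earlier example; the share name and quota are illustrative only:

```py
from azure.mgmt.storage.v2021_06_01.aio import StorageManagementClient
from azure.mgmt.storage.v2021_06_01.models import FileShare, LeaseShareRequest


async def manage_shares_and_policy(
    client: StorageManagementClient, resource_group: str, account: str
) -> None:
    # Create a share with a 1024 GiB quota, then acquire a never-expiring lease on it.
    share = await client.file_shares.create(
        resource_group, account, "myshare", FileShare(share_quota=1024)
    )
    lease = await client.file_shares.lease(
        resource_group, account, "myshare",
        parameters=LeaseShareRequest(action="Acquire", lease_duration=-1),
    )
    print(share.name, lease.lease_id)

    # Read the account's management policy; the policy name must be 'default', and the
    # call raises HttpResponseError if no policy has been set on the account.
    policy = await client.management_policies.get(resource_group, account, "default")
    print(policy.policy)
```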
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class ObjectReplicationPoliciesOperations: + """ObjectReplicationPoliciesOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + resource_group_name: str, + account_name: str, + **kwargs: Any + ) -> AsyncIterable["_models.ObjectReplicationPolicies"]: + """List the object replication policies associated with the storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. 
+ :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ObjectReplicationPolicies or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.v2021_06_01.models.ObjectReplicationPolicies] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ObjectReplicationPolicies"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('ObjectReplicationPolicies', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/objectReplicationPolicies'} # type: ignore + + async def get( + self, + resource_group_name: str, + account_name: str, + object_replication_policy_id: str, + **kwargs: Any + ) -> "_models.ObjectReplicationPolicy": + """Get the object replication policy of the storage account by policy ID. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. 
+ :type account_name: str + :param object_replication_policy_id: For the destination account, provide the value 'default'. + Configure the policy on the destination account first. For the source account, provide the + value of the policy ID that is returned when you download the policy that was defined on the + destination account. The policy is downloaded as a JSON file. + :type object_replication_policy_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ObjectReplicationPolicy, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.ObjectReplicationPolicy + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ObjectReplicationPolicy"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'objectReplicationPolicyId': self._serialize.url("object_replication_policy_id", object_replication_policy_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ObjectReplicationPolicy', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/objectReplicationPolicies/{objectReplicationPolicyId}'} # type: ignore + + async def create_or_update( + self, + resource_group_name: str, + account_name: str, + object_replication_policy_id: str, + properties: "_models.ObjectReplicationPolicy", + **kwargs: Any + ) -> "_models.ObjectReplicationPolicy": + """Create or update the object replication policy of the storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param object_replication_policy_id: For the destination account, provide the value 'default'. + Configure the policy on the destination account first. For the source account, provide the + value of the policy ID that is returned when you download the policy that was defined on the + destination account. The policy is downloaded as a JSON file. + :type object_replication_policy_id: str + :param properties: The object replication policy set to a storage account. A unique policy ID + will be created if absent. + :type properties: ~azure.mgmt.storage.v2021_06_01.models.ObjectReplicationPolicy + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ObjectReplicationPolicy, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.ObjectReplicationPolicy + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ObjectReplicationPolicy"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'objectReplicationPolicyId': self._serialize.url("object_replication_policy_id", object_replication_policy_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(properties, 'ObjectReplicationPolicy') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ObjectReplicationPolicy', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/objectReplicationPolicies/{objectReplicationPolicyId}'} # type: ignore + + async def delete( + self, + resource_group_name: str, + account_name: str, + object_replication_policy_id: str, + **kwargs: Any + ) -> None: + """Deletes the object replication policy associated with the specified storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param object_replication_policy_id: For the destination account, provide the value 'default'. + Configure the policy on the destination account first. For the source account, provide the + value of the policy ID that is returned when you download the policy that was defined on the + destination account. The policy is downloaded as a JSON file. + :type object_replication_policy_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'objectReplicationPolicyId': self._serialize.url("object_replication_policy_id", object_replication_policy_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/objectReplicationPolicies/{objectReplicationPolicyId}'} # type: ignore diff --git 
a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_operations.py new file mode 100644 index 000000000000..133a07c0c985 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_operations.py @@ -0,0 +1,104 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class Operations: + """Operations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs: Any + ) -> AsyncIterable["_models.OperationListResult"]: + """Lists all of the available Storage Rest API operations. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either OperationListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.v2021_06_01.models.OperationListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('OperationListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/providers/Microsoft.Storage/operations'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_private_endpoint_connections_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_private_endpoint_connections_operations.py new file mode 100644 index 000000000000..ee226514b995 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_private_endpoint_connections_operations.py @@ -0,0 +1,325 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
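`Operations.list` is a paged call: the async operation group returns an `AsyncItemPaged` that is consumed with `async for`. A minimal sketch, assuming `azure-identity` and a placeholder subscription ID:

```py
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.storage.aio import StorageManagementClient

async def main():
    async with DefaultAzureCredential() as credential:
        async with StorageManagementClient(credential, "<subscription-id>") as client:
            # The pager yields Operation models one at a time.
            async for operation in client.operations.list():
                print(operation.name)

asyncio.run(main())
```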
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class PrivateEndpointConnectionsOperations: + """PrivateEndpointConnectionsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + resource_group_name: str, + account_name: str, + **kwargs: Any + ) -> AsyncIterable["_models.PrivateEndpointConnectionListResult"]: + """List all the private endpoint connections associated with the storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. 
+ :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either PrivateEndpointConnectionListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.v2021_06_01.models.PrivateEndpointConnectionListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnectionListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('PrivateEndpointConnectionListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/privateEndpointConnections'} # type: ignore + + async def get( + self, + resource_group_name: str, + account_name: str, + private_endpoint_connection_name: str, + **kwargs: Any + ) -> "_models.PrivateEndpointConnection": + """Gets the specified private endpoint connection associated with the storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. 
+ :type account_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. + :type private_endpoint_connection_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnection, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.PrivateEndpointConnection + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + async def put( + self, + resource_group_name: str, + account_name: str, + private_endpoint_connection_name: str, + properties: "_models.PrivateEndpointConnection", + **kwargs: Any + ) -> "_models.PrivateEndpointConnection": + """Update the state of specified private endpoint connection associated with the storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. 
+ :type private_endpoint_connection_name: str + :param properties: The private endpoint connection properties. + :type properties: ~azure.mgmt.storage.v2021_06_01.models.PrivateEndpointConnection + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnection, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.PrivateEndpointConnection + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.put.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(properties, 'PrivateEndpointConnection') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + put.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + async def delete( + self, + resource_group_name: str, + account_name: str, + private_endpoint_connection_name: str, + **kwargs: Any + ) -> None: + """Deletes the specified private endpoint connection associated with the storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. + :type private_endpoint_connection_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_private_link_resources_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_private_link_resources_operations.py new file mode 100644 index 000000000000..b6f32442c750 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_private_link_resources_operations.py @@ -0,0 +1,102 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
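The private endpoint connection operations follow the usual list/get/put/delete shape. A minimal sketch of approving a pending connection, assuming the `PrivateLinkServiceConnectionState` model and placeholder resource and connection names:

```py
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.storage.aio import StorageManagementClient
from azure.mgmt.storage.v2021_06_01 import models

async def main():
    credential = DefaultAzureCredential()
    async with StorageManagementClient(credential, "<subscription-id>") as client:
        # Enumerate existing connections for the account.
        async for conn in client.private_endpoint_connections.list("my-rg", "mystorageacct"):
            print(conn.name, conn.private_link_service_connection_state.status)

        # Approve a specific connection by name (hypothetical name shown).
        connection = await client.private_endpoint_connections.get(
            "my-rg", "mystorageacct", "my-connection"
        )
        connection.private_link_service_connection_state = models.PrivateLinkServiceConnectionState(
            status="Approved", description="Approved by admin"
        )
        await client.private_endpoint_connections.put(
            "my-rg", "mystorageacct", "my-connection", connection
        )
    await credential.close()

asyncio.run(main())
```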
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class PrivateLinkResourcesOperations: + """PrivateLinkResourcesOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def list_by_storage_account( + self, + resource_group_name: str, + account_name: str, + **kwargs: Any + ) -> "_models.PrivateLinkResourceListResult": + """Gets the private link resources that need to be created for a storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. 
+ :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateLinkResourceListResult, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.PrivateLinkResourceListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateLinkResourceListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.list_by_storage_account.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PrivateLinkResourceListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_by_storage_account.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/privateLinkResources'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_queue_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_queue_operations.py new file mode 100644 index 000000000000..11c52db9f103 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_queue_operations.py @@ -0,0 +1,421 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
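`list_by_storage_account` is a single (non-paged) call returning a `PrivateLinkResourceListResult`; the group IDs it reports are what a private endpoint must target. A minimal sketch with placeholder names:

```py
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.storage.aio import StorageManagementClient

async def main():
    credential = DefaultAzureCredential()
    async with StorageManagementClient(credential, "<subscription-id>") as client:
        result = await client.private_link_resources.list_by_storage_account(
            "my-rg", "mystorageacct"
        )
        for resource in result.value:
            # group_id is e.g. 'blob', 'file', 'queue', 'table', 'web' or 'dfs'
            print(resource.group_id, resource.required_zone_names)
    await credential.close()

asyncio.run(main())
```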
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class QueueOperations: + """QueueOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def create( + self, + resource_group_name: str, + account_name: str, + queue_name: str, + queue: "_models.StorageQueue", + **kwargs: Any + ) -> "_models.StorageQueue": + """Creates a new queue with the specified queue name, under the specified account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param queue_name: A queue name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, + it should begin and end with an alphanumeric character and it cannot have two consecutive + dash(-) characters. + :type queue_name: str + :param queue: Queue properties and metadata to be created with. 
+ :type queue: ~azure.mgmt.storage.v2021_06_01.models.StorageQueue + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StorageQueue, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.StorageQueue + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageQueue"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'queueName': self._serialize.url("queue_name", queue_name, 'str', max_length=63, min_length=3, pattern=r'^[a-z0-9]([a-z0-9]|(-(?!-))){1,61}[a-z0-9]$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(queue, 'StorageQueue') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('StorageQueue', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/queueServices/default/queues/{queueName}'} # type: ignore + + async def update( + self, + resource_group_name: str, + account_name: str, + queue_name: str, + queue: "_models.StorageQueue", + **kwargs: Any + ) -> "_models.StorageQueue": + """Creates a new queue with the specified queue name, under the specified account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. 
+ :type account_name: str + :param queue_name: A queue name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, + it should begin and end with an alphanumeric character and it cannot have two consecutive + dash(-) characters. + :type queue_name: str + :param queue: Queue properties and metadata to be created with. + :type queue: ~azure.mgmt.storage.v2021_06_01.models.StorageQueue + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StorageQueue, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.StorageQueue + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageQueue"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'queueName': self._serialize.url("queue_name", queue_name, 'str', max_length=63, min_length=3, pattern=r'^[a-z0-9]([a-z0-9]|(-(?!-))){1,61}[a-z0-9]$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(queue, 'StorageQueue') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('StorageQueue', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/queueServices/default/queues/{queueName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + account_name: str, + queue_name: str, + **kwargs: Any + ) -> "_models.StorageQueue": + """Gets the queue with the specified queue name, under the specified account if it exists. 
+ + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param queue_name: A queue name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, + it should begin and end with an alphanumeric character and it cannot have two consecutive + dash(-) characters. + :type queue_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StorageQueue, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.StorageQueue + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageQueue"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'queueName': self._serialize.url("queue_name", queue_name, 'str', max_length=63, min_length=3, pattern=r'^[a-z0-9]([a-z0-9]|(-(?!-))){1,61}[a-z0-9]$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('StorageQueue', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/queueServices/default/queues/{queueName}'} # type: ignore + + async def delete( + self, + resource_group_name: str, + account_name: str, + queue_name: str, + **kwargs: Any + ) -> None: + """Deletes the queue with the specified queue name, under the specified account if it exists. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param queue_name: A queue name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, + it should begin and end with an alphanumeric character and it cannot have two consecutive + dash(-) characters. + :type queue_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'queueName': self._serialize.url("queue_name", queue_name, 'str', max_length=63, min_length=3, pattern=r'^[a-z0-9]([a-z0-9]|(-(?!-))){1,61}[a-z0-9]$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/queueServices/default/queues/{queueName}'} # type: ignore + + def list( + self, + resource_group_name: str, + account_name: str, + maxpagesize: Optional[str] = None, + filter: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable["_models.ListQueueResource"]: + """Gets a list of all the queues under the specified storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. 
+ :type account_name: str + :param maxpagesize: Optional, a maximum number of queues that should be included in a list + queue response. + :type maxpagesize: str + :param filter: Optional, When specified, only the queues with a name starting with the given + filter will be listed. + :type filter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ListQueueResource or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.v2021_06_01.models.ListQueueResource] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListQueueResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'str') + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('ListQueueResource', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/queueServices/default/queues'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_queue_services_operations.py 
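Queue management calls go through the same versioned client: `create` and `get` return `StorageQueue` models and `list` returns a pager. A minimal sketch with placeholder names (the queue name and metadata key are illustrative):

```py
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.storage.aio import StorageManagementClient
from azure.mgmt.storage.v2021_06_01 import models

async def main():
    credential = DefaultAzureCredential()
    async with StorageManagementClient(credential, "<subscription-id>") as client:
        # Create (or overwrite) a queue with a small piece of metadata.
        await client.queue.create(
            "my-rg", "mystorageacct", "orders",
            models.StorageQueue(metadata={"team": "billing"}),
        )

        # List queues whose names start with 'ord'.
        async for queue in client.queue.list("my-rg", "mystorageacct", filter="ord"):
            print(queue.name)

        # Clean up.
        await client.queue.delete("my-rg", "mystorageacct", "orders")
    await credential.close()

asyncio.run(main())
```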
b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_queue_services_operations.py new file mode 100644 index 000000000000..5a8fd0891c6e --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_queue_services_operations.py @@ -0,0 +1,242 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class QueueServicesOperations: + """QueueServicesOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def list( + self, + resource_group_name: str, + account_name: str, + **kwargs: Any + ) -> "_models.ListQueueServices": + """List all queue services for the storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. 
+ :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ListQueueServices, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.ListQueueServices + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListQueueServices"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ListQueueServices', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/queueServices'} # type: ignore + + async def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: "_models.QueueServiceProperties", + **kwargs: Any + ) -> "_models.QueueServiceProperties": + """Sets the properties of a storage account’s Queue service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param parameters: The properties of a storage account’s Queue service, only properties for + Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. 
+ :type parameters: ~azure.mgmt.storage.v2021_06_01.models.QueueServiceProperties + :keyword callable cls: A custom type or function that will be passed the direct response + :return: QueueServiceProperties, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.QueueServiceProperties + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.QueueServiceProperties"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + queue_service_name = "default" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.set_service_properties.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'queueServiceName': self._serialize.url("queue_service_name", queue_service_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'QueueServiceProperties') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('QueueServiceProperties', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + set_service_properties.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/queueServices/{queueServiceName}'} # type: ignore + + async def get_service_properties( + self, + resource_group_name: str, + account_name: str, + **kwargs: Any + ) -> "_models.QueueServiceProperties": + """Gets the properties of a storage account’s Queue service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: QueueServiceProperties, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.QueueServiceProperties + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.QueueServiceProperties"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + queue_service_name = "default" + accept = "application/json" + + # Construct URL + url = self.get_service_properties.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'queueServiceName': self._serialize.url("queue_service_name", queue_service_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('QueueServiceProperties', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_service_properties.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/queueServices/{queueServiceName}'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_skus_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_skus_operations.py new file mode 100644 index 000000000000..450186f16430 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_skus_operations.py @@ -0,0 +1,108 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
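The queue service operations act on the implicit `default` queue service of the account. A minimal sketch that lists the queue services and reads the current properties (placeholder names; the printed fields are illustrative):

```py
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.storage.aio import StorageManagementClient

async def main():
    credential = DefaultAzureCredential()
    async with StorageManagementClient(credential, "<subscription-id>") as client:
        services = await client.queue_services.list("my-rg", "mystorageacct")
        for service in services.value:
            print(service.name)

        props = await client.queue_services.get_service_properties("my-rg", "mystorageacct")
        # props.cors holds the CORS rules (or None if none are configured).
        print(props.cors)
    await credential.close()

asyncio.run(main())
```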
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class SkusOperations: + """SkusOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs: Any + ) -> AsyncIterable["_models.StorageSkuListResult"]: + """Lists the available SKUs supported by Microsoft.Storage for given subscription. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either StorageSkuListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.v2021_06_01.models.StorageSkuListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageSkuListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('StorageSkuListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + 
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Storage/skus'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_storage_accounts_operations.py new file mode 100644 index 000000000000..326fdd9c4230 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_storage_accounts_operations.py @@ -0,0 +1,1383 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class StorageAccountsOperations: + """StorageAccountsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def check_name_availability( + self, + account_name: "_models.StorageAccountCheckNameAvailabilityParameters", + **kwargs: Any + ) -> "_models.CheckNameAvailabilityResult": + """Checks that the storage account name is valid and is not already in use. + + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. 
+ :type account_name: ~azure.mgmt.storage.v2021_06_01.models.StorageAccountCheckNameAvailabilityParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CheckNameAvailabilityResult, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.CheckNameAvailabilityResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.CheckNameAvailabilityResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.check_name_availability.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(account_name, 'StorageAccountCheckNameAvailabilityParameters') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('CheckNameAvailabilityResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + check_name_availability.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Storage/checkNameAvailability'} # type: ignore + + async def _create_initial( + self, + resource_group_name: str, + account_name: str, + parameters: "_models.StorageAccountCreateParameters", + **kwargs: Any + ) -> Optional["_models.StorageAccount"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.StorageAccount"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._create_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, 
**path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'StorageAccountCreateParameters') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('StorageAccount', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}'} # type: ignore + + async def begin_create( + self, + resource_group_name: str, + account_name: str, + parameters: "_models.StorageAccountCreateParameters", + **kwargs: Any + ) -> AsyncLROPoller["_models.StorageAccount"]: + """Asynchronously creates a new storage account with the specified parameters. If an account is + already created and a subsequent create request is issued with different properties, the + account properties will be updated. If an account is already created and a subsequent create or + update request is issued with the exact same set of properties, the request will succeed. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param parameters: The parameters to provide for the created account. + :type parameters: ~azure.mgmt.storage.v2021_06_01.models.StorageAccountCreateParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either StorageAccount or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.v2021_06_01.models.StorageAccount] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccount"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_initial( + resource_group_name=resource_group_name, + account_name=account_name, + parameters=parameters, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('StorageAccount', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}'} # type: ignore + + async def delete( + self, + resource_group_name: str, + account_name: str, + **kwargs: Any + ) -> None: + """Deletes a storage account in Microsoft Azure. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. 
+ :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}'} # type: ignore + + async def get_properties( + self, + resource_group_name: str, + account_name: str, + expand: Optional[Union[str, "_models.StorageAccountExpand"]] = None, + **kwargs: Any + ) -> "_models.StorageAccount": + """Returns the properties for the specified storage account including but not limited to name, SKU + name, location, and account status. The ListKeys operation should be used to retrieve storage + keys. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param expand: May be used to expand the properties within account's properties. By default, + data is not included when fetching properties. Currently we only support geoReplicationStats + and blobRestoreStatus. 
+ :type expand: str or ~azure.mgmt.storage.v2021_06_01.models.StorageAccountExpand + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StorageAccount, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.StorageAccount + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccount"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.get_properties.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + if expand is not None: + query_parameters['$expand'] = self._serialize.query("expand", expand, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('StorageAccount', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_properties.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}'} # type: ignore + + async def update( + self, + resource_group_name: str, + account_name: str, + parameters: "_models.StorageAccountUpdateParameters", + **kwargs: Any + ) -> "_models.StorageAccount": + """The update operation can be used to update the SKU, encryption, access tier, or tags for a + storage account. It can also be used to map the account to a custom domain. Only one custom + domain is supported per storage account; the replacement/change of custom domain is not + supported. In order to replace an old custom domain, the old value must be cleared/unregistered + before a new value can be set. The update of multiple properties is supported. This call does + not change the storage keys for the account. If you want to change the storage account keys, + use the regenerate keys operation. The location and name of the storage account cannot be + changed after creation. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param parameters: The parameters to provide for the updated account. + :type parameters: ~azure.mgmt.storage.v2021_06_01.models.StorageAccountUpdateParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StorageAccount, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.StorageAccount + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccount"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'StorageAccountUpdateParameters') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('StorageAccount', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}'} # type: ignore + + def list( + self, + **kwargs: Any + ) -> AsyncIterable["_models.StorageAccountListResult"]: + """Lists all the storage accounts available under the subscription. Note that storage keys are not + returned; use the ListKeys operation for this. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either StorageAccountListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.v2021_06_01.models.StorageAccountListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccountListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('StorageAccountListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Storage/storageAccounts'} # type: ignore + + def list_by_resource_group( + self, + resource_group_name: str, + **kwargs: Any + ) -> AsyncIterable["_models.StorageAccountListResult"]: + """Lists all the storage accounts available under the given resource group. Note that storage keys + are not returned; use the ListKeys operation for this. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. 
+ :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either StorageAccountListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.v2021_06_01.models.StorageAccountListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccountListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_resource_group.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('StorageAccountListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts'} # type: ignore + + async def list_keys( + self, + resource_group_name: str, + account_name: str, + expand: Optional[str] = "kerb", + **kwargs: Any + ) -> "_models.StorageAccountListKeysResult": + """Lists the access keys or Kerberos keys (if active directory enabled) for the specified storage + account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param expand: Specifies type of the key to be listed. Possible value is kerb. 
+ :type expand: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StorageAccountListKeysResult, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.StorageAccountListKeysResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccountListKeysResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.list_keys.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + if expand is not None: + query_parameters['$expand'] = self._serialize.query("expand", expand, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('StorageAccountListKeysResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/listKeys'} # type: ignore + + async def regenerate_key( + self, + resource_group_name: str, + account_name: str, + regenerate_key: "_models.StorageAccountRegenerateKeyParameters", + **kwargs: Any + ) -> "_models.StorageAccountListKeysResult": + """Regenerates one of the access keys or Kerberos keys for the specified storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param regenerate_key: Specifies name of the key which should be regenerated -- key1, key2, + kerb1, kerb2. 
+ :type regenerate_key: ~azure.mgmt.storage.v2021_06_01.models.StorageAccountRegenerateKeyParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StorageAccountListKeysResult, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.StorageAccountListKeysResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccountListKeysResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.regenerate_key.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(regenerate_key, 'StorageAccountRegenerateKeyParameters') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('StorageAccountListKeysResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + regenerate_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/regenerateKey'} # type: ignore + + async def list_account_sas( + self, + resource_group_name: str, + account_name: str, + parameters: "_models.AccountSasParameters", + **kwargs: Any + ) -> "_models.ListAccountSasResponse": + """List SAS credentials of a storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param parameters: The parameters to provide to list SAS credentials for the storage account. 
+ :type parameters: ~azure.mgmt.storage.v2021_06_01.models.AccountSasParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ListAccountSasResponse, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.ListAccountSasResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListAccountSasResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.list_account_sas.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'AccountSasParameters') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ListAccountSasResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_account_sas.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/ListAccountSas'} # type: ignore + + async def list_service_sas( + self, + resource_group_name: str, + account_name: str, + parameters: "_models.ServiceSasParameters", + **kwargs: Any + ) -> "_models.ListServiceSasResponse": + """List service SAS credentials of a specific resource. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param parameters: The parameters to provide to list service SAS credentials. 
+ :type parameters: ~azure.mgmt.storage.v2021_06_01.models.ServiceSasParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ListServiceSasResponse, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.ListServiceSasResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListServiceSasResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.list_service_sas.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'ServiceSasParameters') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ListServiceSasResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_service_sas.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/ListServiceSas'} # type: ignore + + async def _failover_initial( + self, + resource_group_name: str, + account_name: str, + **kwargs: Any + ) -> None: + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + + # Construct URL + url = self._failover_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, 
**path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _failover_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/failover'} # type: ignore + + async def begin_failover( + self, + resource_group_name: str, + account_name: str, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Failover request can be triggered for a storage account in case of availability issues. The + failover occurs from the storage account's primary cluster to secondary cluster for RA-GRS + accounts. The secondary cluster will become primary after failover. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._failover_initial( + resource_group_name=resource_group_name, + account_name=account_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_failover.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/failover'} # type: ignore + + async def _hierarchical_namespace_migration_initial( + self, + resource_group_name: str, + account_name: str, + request_type: str, + **kwargs: Any + ) -> None: + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self._hierarchical_namespace_migration_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + query_parameters['requestType'] = self._serialize.query("request_type", request_type, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await 
self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _hierarchical_namespace_migration_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/hnsonmigration'} # type: ignore + + async def begin_hierarchical_namespace_migration( + self, + resource_group_name: str, + account_name: str, + request_type: str, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Live Migration of storage account to enable Hns. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param request_type: Required. Hierarchical namespace migration type can either be a + hierarchical namespace validation request 'HnsOnValidationRequest' or a hydration request + 'HnsOnHydrationRequest'. The validation request will validate the migration whereas the + hydration request will migrate the account. + :type request_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._hierarchical_namespace_migration_initial( + resource_group_name=resource_group_name, + account_name=account_name, + request_type=request_type, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_hierarchical_namespace_migration.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/hnsonmigration'} # type: ignore + + async def _abort_hierarchical_namespace_migration_initial( + self, + resource_group_name: str, + account_name: str, + **kwargs: Any + ) -> None: + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self._abort_hierarchical_namespace_migration_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await 
self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _abort_hierarchical_namespace_migration_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/aborthnsonmigration'} # type: ignore + + async def begin_abort_hierarchical_namespace_migration( + self, + resource_group_name: str, + account_name: str, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Abort live Migration of storage account to enable Hns. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
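+
+        Example (illustrative sketch only; assumes the same ``client`` and
+        ``client.storage_accounts`` operation group as in the migration example above):
+
+        .. code-block:: python
+
+            poller = await client.storage_accounts.begin_abort_hierarchical_namespace_migration(
+                resource_group_name="example-rg",       # hypothetical resource group
+                account_name="examplestorageacct",      # hypothetical storage account
+            )
+            await poller.result()
+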
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._abort_hierarchical_namespace_migration_initial( + resource_group_name=resource_group_name, + account_name=account_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_abort_hierarchical_namespace_migration.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/aborthnsonmigration'} # type: ignore + + async def _restore_blob_ranges_initial( + self, + resource_group_name: str, + account_name: str, + parameters: "_models.BlobRestoreParameters", + **kwargs: Any + ) -> "_models.BlobRestoreStatus": + cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobRestoreStatus"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._restore_blob_ranges_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') 
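+        # The Accept header is set next; the request body below is the JSON-serialized
+        # BlobRestoreParameters model supplied by the caller.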
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'BlobRestoreParameters') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('BlobRestoreStatus', pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize('BlobRestoreStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _restore_blob_ranges_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/restoreBlobRanges'} # type: ignore + + async def begin_restore_blob_ranges( + self, + resource_group_name: str, + account_name: str, + parameters: "_models.BlobRestoreParameters", + **kwargs: Any + ) -> AsyncLROPoller["_models.BlobRestoreStatus"]: + """Restore blobs in the specified blob ranges. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param parameters: The parameters to provide for restore blob ranges. + :type parameters: ~azure.mgmt.storage.v2021_06_01.models.BlobRestoreParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
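+
+        Example (illustrative sketch only): it assumes ``client`` is an authenticated
+        ``StorageManagementClient`` from ``azure.mgmt.storage.aio``, that this operation group
+        is exposed as ``client.storage_accounts``, and that ``BlobRestoreParameters`` and
+        ``BlobRestoreRange`` take the field names shown (check the generated models for the
+        exact signatures).
+
+        .. code-block:: python
+
+            import datetime
+            from azure.mgmt.storage.v2021_06_01.models import (
+                BlobRestoreParameters,
+                BlobRestoreRange,
+            )
+
+            restore_parameters = BlobRestoreParameters(
+                # Point in time to restore to, within the configured restore window (assumed field name).
+                time_to_restore=datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(hours=1),
+                # Empty start/end ranges are assumed to mean "from the first blob" / "to the last blob".
+                blob_ranges=[BlobRestoreRange(start_range="", end_range="")],
+            )
+            poller = await client.storage_accounts.begin_restore_blob_ranges(
+                resource_group_name="example-rg",       # hypothetical resource group
+                account_name="examplestorageacct",      # hypothetical storage account
+                parameters=restore_parameters,
+            )
+            status = await poller.result()              # BlobRestoreStatus on success
+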
+ :return: An instance of AsyncLROPoller that returns either BlobRestoreStatus or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.v2021_06_01.models.BlobRestoreStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobRestoreStatus"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._restore_blob_ranges_initial( + resource_group_name=resource_group_name, + account_name=account_name, + parameters=parameters, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('BlobRestoreStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_restore_blob_ranges.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/restoreBlobRanges'} # type: ignore + + async def revoke_user_delegation_keys( + self, + resource_group_name: str, + account_name: str, + **kwargs: Any + ) -> None: + """Revoke user delegation keys. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. 
+ :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + + # Construct URL + url = self.revoke_user_delegation_keys.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + revoke_user_delegation_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/revokeUserDelegationKeys'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_table_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_table_operations.py new file mode 100644 index 000000000000..634b8f9b649d --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_table_operations.py @@ -0,0 +1,389 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class TableOperations: + """TableOperations async operations. 
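+
+    Example (illustrative sketch only): this assumes the client exposes this operation group
+    as ``client.table`` and that the code runs inside an ``async def`` with an authenticated
+    ``StorageManagementClient`` from ``azure.mgmt.storage.aio``.
+
+    .. code-block:: python
+
+        table = await client.table.create(
+            resource_group_name="example-rg",       # hypothetical resource group
+            account_name="examplestorageacct",      # hypothetical storage account
+            table_name="exampletable",
+        )
+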
+ + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def create( + self, + resource_group_name: str, + account_name: str, + table_name: str, + **kwargs: Any + ) -> "_models.Table": + """Creates a new table with the specified table name, under the specified account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param table_name: A table name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin + with a numeric character. + :type table_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Table, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.Table + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.Table"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.create.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'tableName': self._serialize.url("table_name", table_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z][A-Za-z0-9]{2,62}$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise 
HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Table', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices/default/tables/{tableName}'} # type: ignore + + async def update( + self, + resource_group_name: str, + account_name: str, + table_name: str, + **kwargs: Any + ) -> "_models.Table": + """Creates a new table with the specified table name, under the specified account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param table_name: A table name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin + with a numeric character. + :type table_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Table, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.Table + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.Table"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'tableName': self._serialize.url("table_name", table_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z][A-Za-z0-9]{2,62}$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.patch(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Table', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices/default/tables/{tableName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + account_name: str, + table_name: str, + **kwargs: Any + ) -> "_models.Table": + """Gets the table with the specified table name, under the specified account if it exists. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param table_name: A table name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin + with a numeric character. + :type table_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Table, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.Table + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.Table"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'tableName': self._serialize.url("table_name", table_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z][A-Za-z0-9]{2,62}$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Table', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices/default/tables/{tableName}'} # type: ignore + + async def delete( + self, + resource_group_name: str, + account_name: str, + table_name: str, + 
**kwargs: Any + ) -> None: + """Deletes the table with the specified table name, under the specified account if it exists. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param table_name: A table name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin + with a numeric character. + :type table_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'tableName': self._serialize.url("table_name", table_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z][A-Za-z0-9]{2,62}$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices/default/tables/{tableName}'} # type: ignore + + def list( + self, + resource_group_name: str, + account_name: str, + **kwargs: Any + ) -> AsyncIterable["_models.ListTableResource"]: + """Gets a list of all the tables under the specified storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ListTableResource or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.v2021_06_01.models.ListTableResource] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListTableResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('ListTableResource', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices/default/tables'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_table_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_table_services_operations.py new file mode 100644 index 000000000000..dbd68eb1ef0c --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_table_services_operations.py @@ -0,0 +1,242 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) 
Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class TableServicesOperations: + """TableServicesOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def list( + self, + resource_group_name: str, + account_name: str, + **kwargs: Any + ) -> "_models.ListTableServices": + """List all table services for the storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. 
+ :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ListTableServices, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.ListTableServices + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListTableServices"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ListTableServices', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices'} # type: ignore + + async def set_service_properties( + self, + resource_group_name: str, + account_name: str, + parameters: "_models.TableServiceProperties", + **kwargs: Any + ) -> "_models.TableServiceProperties": + """Sets the properties of a storage account’s Table service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param parameters: The properties of a storage account’s Table service, only properties for + Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. 
+ :type parameters: ~azure.mgmt.storage.v2021_06_01.models.TableServiceProperties + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TableServiceProperties, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.TableServiceProperties + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.TableServiceProperties"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + table_service_name = "default" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.set_service_properties.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'tableServiceName': self._serialize.url("table_service_name", table_service_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'TableServiceProperties') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('TableServiceProperties', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + set_service_properties.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices/{tableServiceName}'} # type: ignore + + async def get_service_properties( + self, + resource_group_name: str, + account_name: str, + **kwargs: Any + ) -> "_models.TableServiceProperties": + """Gets the properties of a storage account’s Table service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TableServiceProperties, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.TableServiceProperties + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.TableServiceProperties"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + table_service_name = "default" + accept = "application/json" + + # Construct URL + url = self.get_service_properties.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'tableServiceName': self._serialize.url("table_service_name", table_service_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('TableServiceProperties', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_service_properties.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices/{tableServiceName}'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_usages_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_usages_operations.py new file mode 100644 index 000000000000..6b592ccc4828 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/aio/operations/_usages_operations.py @@ -0,0 +1,113 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class UsagesOperations: + """UsagesOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_location( + self, + location: str, + **kwargs: Any + ) -> AsyncIterable["_models.UsageListResult"]: + """Gets the current usage count and the limit for the resources of the location under the + subscription. + + :param location: The location of the Azure Storage resource. + :type location: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either UsageListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.v2021_06_01.models.UsageListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.UsageListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_location.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'location': self._serialize.url("location", location, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('UsageListResult', pipeline_response) + list_of_elem = 
deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_location.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Storage/locations/{location}/usages'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/models/__init__.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/models/__init__.py new file mode 100644 index 000000000000..41c730b3261f --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/models/__init__.py @@ -0,0 +1,559 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +try: + from ._models_py3 import AccessPolicy + from ._models_py3 import AccountImmutabilityPolicyProperties + from ._models_py3 import AccountSasParameters + from ._models_py3 import ActiveDirectoryProperties + from ._models_py3 import AzureEntityResource + from ._models_py3 import AzureFilesIdentityBasedAuthentication + from ._models_py3 import BlobContainer + from ._models_py3 import BlobInventoryPolicy + from ._models_py3 import BlobInventoryPolicyDefinition + from ._models_py3 import BlobInventoryPolicyFilter + from ._models_py3 import BlobInventoryPolicyRule + from ._models_py3 import BlobInventoryPolicySchema + from ._models_py3 import BlobRestoreParameters + from ._models_py3 import BlobRestoreRange + from ._models_py3 import BlobRestoreStatus + from ._models_py3 import BlobServiceItems + from ._models_py3 import BlobServiceProperties + from ._models_py3 import ChangeFeed + from ._models_py3 import CheckNameAvailabilityResult + from ._models_py3 import CloudErrorAutoGenerated + from ._models_py3 import CloudErrorBody + from ._models_py3 import CloudErrorBodyAutoGenerated + from ._models_py3 import CorsRule + from ._models_py3 import CorsRules + from ._models_py3 import CustomDomain + from ._models_py3 import DateAfterCreation + from ._models_py3 import DateAfterModification + from ._models_py3 import DeleteRetentionPolicy + from ._models_py3 import DeletedAccount + from ._models_py3 import DeletedAccountListResult + from ._models_py3 import DeletedShare + from ._models_py3 import Dimension + from ._models_py3 import Encryption + from ._models_py3 import EncryptionIdentity + from ._models_py3 import EncryptionScope + from ._models_py3 import EncryptionScopeKeyVaultProperties + from ._models_py3 import EncryptionScopeListResult + from ._models_py3 import EncryptionService + from ._models_py3 import EncryptionServices + from ._models_py3 import Endpoints + from ._models_py3 import ErrorResponse + from ._models_py3 import ErrorResponseBody + 
from ._models_py3 import ExtendedLocation + from ._models_py3 import FileServiceItems + from ._models_py3 import FileServiceProperties + from ._models_py3 import FileShare + from ._models_py3 import FileShareItem + from ._models_py3 import FileShareItems + from ._models_py3 import GeoReplicationStats + from ._models_py3 import IPRule + from ._models_py3 import Identity + from ._models_py3 import ImmutabilityPolicy + from ._models_py3 import ImmutabilityPolicyProperties + from ._models_py3 import ImmutableStorageAccount + from ._models_py3 import ImmutableStorageWithVersioning + from ._models_py3 import KeyCreationTime + from ._models_py3 import KeyPolicy + from ._models_py3 import KeyVaultProperties + from ._models_py3 import LastAccessTimeTrackingPolicy + from ._models_py3 import LeaseContainerRequest + from ._models_py3 import LeaseContainerResponse + from ._models_py3 import LeaseShareRequest + from ._models_py3 import LeaseShareResponse + from ._models_py3 import LegalHold + from ._models_py3 import LegalHoldProperties + from ._models_py3 import ListAccountSasResponse + from ._models_py3 import ListBlobInventoryPolicy + from ._models_py3 import ListContainerItem + from ._models_py3 import ListContainerItems + from ._models_py3 import ListQueue + from ._models_py3 import ListQueueResource + from ._models_py3 import ListQueueServices + from ._models_py3 import ListServiceSasResponse + from ._models_py3 import ListTableResource + from ._models_py3 import ListTableServices + from ._models_py3 import ManagementPolicy + from ._models_py3 import ManagementPolicyAction + from ._models_py3 import ManagementPolicyBaseBlob + from ._models_py3 import ManagementPolicyDefinition + from ._models_py3 import ManagementPolicyFilter + from ._models_py3 import ManagementPolicyRule + from ._models_py3 import ManagementPolicySchema + from ._models_py3 import ManagementPolicySnapShot + from ._models_py3 import ManagementPolicyVersion + from ._models_py3 import MetricSpecification + from ._models_py3 import Multichannel + from ._models_py3 import NetworkRuleSet + from ._models_py3 import ObjectReplicationPolicies + from ._models_py3 import ObjectReplicationPolicy + from ._models_py3 import ObjectReplicationPolicyFilter + from ._models_py3 import ObjectReplicationPolicyRule + from ._models_py3 import Operation + from ._models_py3 import OperationDisplay + from ._models_py3 import OperationListResult + from ._models_py3 import PrivateEndpoint + from ._models_py3 import PrivateEndpointConnection + from ._models_py3 import PrivateEndpointConnectionListResult + from ._models_py3 import PrivateLinkResource + from ._models_py3 import PrivateLinkResourceListResult + from ._models_py3 import PrivateLinkServiceConnectionState + from ._models_py3 import ProtectedAppendWritesHistory + from ._models_py3 import ProtocolSettings + from ._models_py3 import ProxyResource + from ._models_py3 import QueueServiceProperties + from ._models_py3 import Resource + from ._models_py3 import ResourceAccessRule + from ._models_py3 import RestorePolicyProperties + from ._models_py3 import Restriction + from ._models_py3 import RoutingPreference + from ._models_py3 import SKUCapability + from ._models_py3 import SasPolicy + from ._models_py3 import ServiceSasParameters + from ._models_py3 import ServiceSpecification + from ._models_py3 import SignedIdentifier + from ._models_py3 import Sku + from ._models_py3 import SkuInformation + from ._models_py3 import SmbSetting + from ._models_py3 import StorageAccount + from ._models_py3 import 
StorageAccountCheckNameAvailabilityParameters + from ._models_py3 import StorageAccountCreateParameters + from ._models_py3 import StorageAccountInternetEndpoints + from ._models_py3 import StorageAccountKey + from ._models_py3 import StorageAccountListKeysResult + from ._models_py3 import StorageAccountListResult + from ._models_py3 import StorageAccountMicrosoftEndpoints + from ._models_py3 import StorageAccountRegenerateKeyParameters + from ._models_py3 import StorageAccountUpdateParameters + from ._models_py3 import StorageQueue + from ._models_py3 import StorageSkuListResult + from ._models_py3 import SystemData + from ._models_py3 import Table + from ._models_py3 import TableServiceProperties + from ._models_py3 import TagFilter + from ._models_py3 import TagProperty + from ._models_py3 import TrackedResource + from ._models_py3 import UpdateHistoryProperty + from ._models_py3 import Usage + from ._models_py3 import UsageListResult + from ._models_py3 import UsageName + from ._models_py3 import UserAssignedIdentity + from ._models_py3 import VirtualNetworkRule +except (SyntaxError, ImportError): + from ._models import AccessPolicy # type: ignore + from ._models import AccountImmutabilityPolicyProperties # type: ignore + from ._models import AccountSasParameters # type: ignore + from ._models import ActiveDirectoryProperties # type: ignore + from ._models import AzureEntityResource # type: ignore + from ._models import AzureFilesIdentityBasedAuthentication # type: ignore + from ._models import BlobContainer # type: ignore + from ._models import BlobInventoryPolicy # type: ignore + from ._models import BlobInventoryPolicyDefinition # type: ignore + from ._models import BlobInventoryPolicyFilter # type: ignore + from ._models import BlobInventoryPolicyRule # type: ignore + from ._models import BlobInventoryPolicySchema # type: ignore + from ._models import BlobRestoreParameters # type: ignore + from ._models import BlobRestoreRange # type: ignore + from ._models import BlobRestoreStatus # type: ignore + from ._models import BlobServiceItems # type: ignore + from ._models import BlobServiceProperties # type: ignore + from ._models import ChangeFeed # type: ignore + from ._models import CheckNameAvailabilityResult # type: ignore + from ._models import CloudErrorAutoGenerated # type: ignore + from ._models import CloudErrorBody # type: ignore + from ._models import CloudErrorBodyAutoGenerated # type: ignore + from ._models import CorsRule # type: ignore + from ._models import CorsRules # type: ignore + from ._models import CustomDomain # type: ignore + from ._models import DateAfterCreation # type: ignore + from ._models import DateAfterModification # type: ignore + from ._models import DeleteRetentionPolicy # type: ignore + from ._models import DeletedAccount # type: ignore + from ._models import DeletedAccountListResult # type: ignore + from ._models import DeletedShare # type: ignore + from ._models import Dimension # type: ignore + from ._models import Encryption # type: ignore + from ._models import EncryptionIdentity # type: ignore + from ._models import EncryptionScope # type: ignore + from ._models import EncryptionScopeKeyVaultProperties # type: ignore + from ._models import EncryptionScopeListResult # type: ignore + from ._models import EncryptionService # type: ignore + from ._models import EncryptionServices # type: ignore + from ._models import Endpoints # type: ignore + from ._models import ErrorResponse # type: ignore + from ._models import ErrorResponseBody # type: ignore + 
from ._models import ExtendedLocation # type: ignore + from ._models import FileServiceItems # type: ignore + from ._models import FileServiceProperties # type: ignore + from ._models import FileShare # type: ignore + from ._models import FileShareItem # type: ignore + from ._models import FileShareItems # type: ignore + from ._models import GeoReplicationStats # type: ignore + from ._models import IPRule # type: ignore + from ._models import Identity # type: ignore + from ._models import ImmutabilityPolicy # type: ignore + from ._models import ImmutabilityPolicyProperties # type: ignore + from ._models import ImmutableStorageAccount # type: ignore + from ._models import ImmutableStorageWithVersioning # type: ignore + from ._models import KeyCreationTime # type: ignore + from ._models import KeyPolicy # type: ignore + from ._models import KeyVaultProperties # type: ignore + from ._models import LastAccessTimeTrackingPolicy # type: ignore + from ._models import LeaseContainerRequest # type: ignore + from ._models import LeaseContainerResponse # type: ignore + from ._models import LeaseShareRequest # type: ignore + from ._models import LeaseShareResponse # type: ignore + from ._models import LegalHold # type: ignore + from ._models import LegalHoldProperties # type: ignore + from ._models import ListAccountSasResponse # type: ignore + from ._models import ListBlobInventoryPolicy # type: ignore + from ._models import ListContainerItem # type: ignore + from ._models import ListContainerItems # type: ignore + from ._models import ListQueue # type: ignore + from ._models import ListQueueResource # type: ignore + from ._models import ListQueueServices # type: ignore + from ._models import ListServiceSasResponse # type: ignore + from ._models import ListTableResource # type: ignore + from ._models import ListTableServices # type: ignore + from ._models import ManagementPolicy # type: ignore + from ._models import ManagementPolicyAction # type: ignore + from ._models import ManagementPolicyBaseBlob # type: ignore + from ._models import ManagementPolicyDefinition # type: ignore + from ._models import ManagementPolicyFilter # type: ignore + from ._models import ManagementPolicyRule # type: ignore + from ._models import ManagementPolicySchema # type: ignore + from ._models import ManagementPolicySnapShot # type: ignore + from ._models import ManagementPolicyVersion # type: ignore + from ._models import MetricSpecification # type: ignore + from ._models import Multichannel # type: ignore + from ._models import NetworkRuleSet # type: ignore + from ._models import ObjectReplicationPolicies # type: ignore + from ._models import ObjectReplicationPolicy # type: ignore + from ._models import ObjectReplicationPolicyFilter # type: ignore + from ._models import ObjectReplicationPolicyRule # type: ignore + from ._models import Operation # type: ignore + from ._models import OperationDisplay # type: ignore + from ._models import OperationListResult # type: ignore + from ._models import PrivateEndpoint # type: ignore + from ._models import PrivateEndpointConnection # type: ignore + from ._models import PrivateEndpointConnectionListResult # type: ignore + from ._models import PrivateLinkResource # type: ignore + from ._models import PrivateLinkResourceListResult # type: ignore + from ._models import PrivateLinkServiceConnectionState # type: ignore + from ._models import ProtectedAppendWritesHistory # type: ignore + from ._models import ProtocolSettings # type: ignore + from ._models import ProxyResource # type: 
ignore + from ._models import QueueServiceProperties # type: ignore + from ._models import Resource # type: ignore + from ._models import ResourceAccessRule # type: ignore + from ._models import RestorePolicyProperties # type: ignore + from ._models import Restriction # type: ignore + from ._models import RoutingPreference # type: ignore + from ._models import SKUCapability # type: ignore + from ._models import SasPolicy # type: ignore + from ._models import ServiceSasParameters # type: ignore + from ._models import ServiceSpecification # type: ignore + from ._models import SignedIdentifier # type: ignore + from ._models import Sku # type: ignore + from ._models import SkuInformation # type: ignore + from ._models import SmbSetting # type: ignore + from ._models import StorageAccount # type: ignore + from ._models import StorageAccountCheckNameAvailabilityParameters # type: ignore + from ._models import StorageAccountCreateParameters # type: ignore + from ._models import StorageAccountInternetEndpoints # type: ignore + from ._models import StorageAccountKey # type: ignore + from ._models import StorageAccountListKeysResult # type: ignore + from ._models import StorageAccountListResult # type: ignore + from ._models import StorageAccountMicrosoftEndpoints # type: ignore + from ._models import StorageAccountRegenerateKeyParameters # type: ignore + from ._models import StorageAccountUpdateParameters # type: ignore + from ._models import StorageQueue # type: ignore + from ._models import StorageSkuListResult # type: ignore + from ._models import SystemData # type: ignore + from ._models import Table # type: ignore + from ._models import TableServiceProperties # type: ignore + from ._models import TagFilter # type: ignore + from ._models import TagProperty # type: ignore + from ._models import TrackedResource # type: ignore + from ._models import UpdateHistoryProperty # type: ignore + from ._models import Usage # type: ignore + from ._models import UsageListResult # type: ignore + from ._models import UsageName # type: ignore + from ._models import UserAssignedIdentity # type: ignore + from ._models import VirtualNetworkRule # type: ignore + +from ._storage_management_client_enums import ( + AccessTier, + AccountImmutabilityPolicyState, + AccountStatus, + BlobInventoryPolicyName, + BlobRestoreProgressStatus, + Bypass, + CorsRuleAllowedMethodsItem, + CreatedByType, + DefaultAction, + DefaultSharePermission, + DirectoryServiceOptions, + EnabledProtocols, + EncryptionScopeSource, + EncryptionScopeState, + ExpirationAction, + ExtendedLocationTypes, + Format, + GeoReplicationStatus, + HttpProtocol, + IdentityType, + ImmutabilityPolicyState, + ImmutabilityPolicyUpdateType, + InventoryRuleType, + KeyPermission, + KeySource, + KeyType, + Kind, + LargeFileSharesState, + LeaseContainerRequestAction, + LeaseDuration, + LeaseShareAction, + LeaseState, + LeaseStatus, + ListContainersInclude, + ManagementPolicyName, + MigrationState, + MinimumTlsVersion, + Name, + ObjectType, + Permissions, + PrivateEndpointConnectionProvisioningState, + PrivateEndpointServiceConnectionStatus, + ProvisioningState, + PublicAccess, + PublicNetworkAccess, + Reason, + ReasonCode, + RootSquashType, + RoutingChoice, + RuleType, + Schedule, + Services, + ShareAccessTier, + SignedResource, + SignedResourceTypes, + SkuName, + SkuTier, + State, + StorageAccountExpand, + UsageUnit, +) + +__all__ = [ + 'AccessPolicy', + 'AccountImmutabilityPolicyProperties', + 'AccountSasParameters', + 'ActiveDirectoryProperties', + 
'AzureEntityResource', + 'AzureFilesIdentityBasedAuthentication', + 'BlobContainer', + 'BlobInventoryPolicy', + 'BlobInventoryPolicyDefinition', + 'BlobInventoryPolicyFilter', + 'BlobInventoryPolicyRule', + 'BlobInventoryPolicySchema', + 'BlobRestoreParameters', + 'BlobRestoreRange', + 'BlobRestoreStatus', + 'BlobServiceItems', + 'BlobServiceProperties', + 'ChangeFeed', + 'CheckNameAvailabilityResult', + 'CloudErrorAutoGenerated', + 'CloudErrorBody', + 'CloudErrorBodyAutoGenerated', + 'CorsRule', + 'CorsRules', + 'CustomDomain', + 'DateAfterCreation', + 'DateAfterModification', + 'DeleteRetentionPolicy', + 'DeletedAccount', + 'DeletedAccountListResult', + 'DeletedShare', + 'Dimension', + 'Encryption', + 'EncryptionIdentity', + 'EncryptionScope', + 'EncryptionScopeKeyVaultProperties', + 'EncryptionScopeListResult', + 'EncryptionService', + 'EncryptionServices', + 'Endpoints', + 'ErrorResponse', + 'ErrorResponseBody', + 'ExtendedLocation', + 'FileServiceItems', + 'FileServiceProperties', + 'FileShare', + 'FileShareItem', + 'FileShareItems', + 'GeoReplicationStats', + 'IPRule', + 'Identity', + 'ImmutabilityPolicy', + 'ImmutabilityPolicyProperties', + 'ImmutableStorageAccount', + 'ImmutableStorageWithVersioning', + 'KeyCreationTime', + 'KeyPolicy', + 'KeyVaultProperties', + 'LastAccessTimeTrackingPolicy', + 'LeaseContainerRequest', + 'LeaseContainerResponse', + 'LeaseShareRequest', + 'LeaseShareResponse', + 'LegalHold', + 'LegalHoldProperties', + 'ListAccountSasResponse', + 'ListBlobInventoryPolicy', + 'ListContainerItem', + 'ListContainerItems', + 'ListQueue', + 'ListQueueResource', + 'ListQueueServices', + 'ListServiceSasResponse', + 'ListTableResource', + 'ListTableServices', + 'ManagementPolicy', + 'ManagementPolicyAction', + 'ManagementPolicyBaseBlob', + 'ManagementPolicyDefinition', + 'ManagementPolicyFilter', + 'ManagementPolicyRule', + 'ManagementPolicySchema', + 'ManagementPolicySnapShot', + 'ManagementPolicyVersion', + 'MetricSpecification', + 'Multichannel', + 'NetworkRuleSet', + 'ObjectReplicationPolicies', + 'ObjectReplicationPolicy', + 'ObjectReplicationPolicyFilter', + 'ObjectReplicationPolicyRule', + 'Operation', + 'OperationDisplay', + 'OperationListResult', + 'PrivateEndpoint', + 'PrivateEndpointConnection', + 'PrivateEndpointConnectionListResult', + 'PrivateLinkResource', + 'PrivateLinkResourceListResult', + 'PrivateLinkServiceConnectionState', + 'ProtectedAppendWritesHistory', + 'ProtocolSettings', + 'ProxyResource', + 'QueueServiceProperties', + 'Resource', + 'ResourceAccessRule', + 'RestorePolicyProperties', + 'Restriction', + 'RoutingPreference', + 'SKUCapability', + 'SasPolicy', + 'ServiceSasParameters', + 'ServiceSpecification', + 'SignedIdentifier', + 'Sku', + 'SkuInformation', + 'SmbSetting', + 'StorageAccount', + 'StorageAccountCheckNameAvailabilityParameters', + 'StorageAccountCreateParameters', + 'StorageAccountInternetEndpoints', + 'StorageAccountKey', + 'StorageAccountListKeysResult', + 'StorageAccountListResult', + 'StorageAccountMicrosoftEndpoints', + 'StorageAccountRegenerateKeyParameters', + 'StorageAccountUpdateParameters', + 'StorageQueue', + 'StorageSkuListResult', + 'SystemData', + 'Table', + 'TableServiceProperties', + 'TagFilter', + 'TagProperty', + 'TrackedResource', + 'UpdateHistoryProperty', + 'Usage', + 'UsageListResult', + 'UsageName', + 'UserAssignedIdentity', + 'VirtualNetworkRule', + 'AccessTier', + 'AccountImmutabilityPolicyState', + 'AccountStatus', + 'BlobInventoryPolicyName', + 'BlobRestoreProgressStatus', + 'Bypass', + 
'CorsRuleAllowedMethodsItem', + 'CreatedByType', + 'DefaultAction', + 'DefaultSharePermission', + 'DirectoryServiceOptions', + 'EnabledProtocols', + 'EncryptionScopeSource', + 'EncryptionScopeState', + 'ExpirationAction', + 'ExtendedLocationTypes', + 'Format', + 'GeoReplicationStatus', + 'HttpProtocol', + 'IdentityType', + 'ImmutabilityPolicyState', + 'ImmutabilityPolicyUpdateType', + 'InventoryRuleType', + 'KeyPermission', + 'KeySource', + 'KeyType', + 'Kind', + 'LargeFileSharesState', + 'LeaseContainerRequestAction', + 'LeaseDuration', + 'LeaseShareAction', + 'LeaseState', + 'LeaseStatus', + 'ListContainersInclude', + 'ManagementPolicyName', + 'MigrationState', + 'MinimumTlsVersion', + 'Name', + 'ObjectType', + 'Permissions', + 'PrivateEndpointConnectionProvisioningState', + 'PrivateEndpointServiceConnectionStatus', + 'ProvisioningState', + 'PublicAccess', + 'PublicNetworkAccess', + 'Reason', + 'ReasonCode', + 'RootSquashType', + 'RoutingChoice', + 'RuleType', + 'Schedule', + 'Services', + 'ShareAccessTier', + 'SignedResource', + 'SignedResourceTypes', + 'SkuName', + 'SkuTier', + 'State', + 'StorageAccountExpand', + 'UsageUnit', +] diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/models/_models.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/models/_models.py new file mode 100644 index 000000000000..df1e7ef924c7 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/models/_models.py @@ -0,0 +1,5943 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.core.exceptions import HttpResponseError +import msrest.serialization + + +class AccessPolicy(msrest.serialization.Model): + """AccessPolicy. + + :param start_time: Start time of the access policy. + :type start_time: ~datetime.datetime + :param expiry_time: Expiry time of the access policy. + :type expiry_time: ~datetime.datetime + :param permission: List of abbreviated permissions. + :type permission: str + """ + + _attribute_map = { + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, + 'permission': {'key': 'permission', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AccessPolicy, self).__init__(**kwargs) + self.start_time = kwargs.get('start_time', None) + self.expiry_time = kwargs.get('expiry_time', None) + self.permission = kwargs.get('permission', None) + + +class AccountImmutabilityPolicyProperties(msrest.serialization.Model): + """This defines account-level immutability policy properties. + + :param immutability_period_since_creation_in_days: The immutability period for the blobs in the + container since the policy creation, in days. + :type immutability_period_since_creation_in_days: int + :param state: The ImmutabilityPolicy state defines the mode of the policy. Disabled state + disables the policy, Unlocked state allows increase and decrease of immutability retention time + and also allows toggling allowProtectedAppendWrites property, Locked state only allows the + increase of the immutability retention time. 
A policy can only be created in a Disabled or + Unlocked state and can be toggled between the two states. Only a policy in an Unlocked state + can transition to a Locked state which cannot be reverted. Possible values include: "Unlocked", + "Locked", "Disabled". + :type state: str or ~azure.mgmt.storage.v2021_06_01.models.AccountImmutabilityPolicyState + :param allow_protected_append_writes: This property can only be changed for disabled and + unlocked time-based retention policies. When enabled, new blocks can be written to an append + blob while maintaining immutability protection and compliance. Only new blocks can be added and + any existing blocks cannot be modified or deleted. + :type allow_protected_append_writes: bool + """ + + _validation = { + 'immutability_period_since_creation_in_days': {'maximum': 146000, 'minimum': 1}, + } + + _attribute_map = { + 'immutability_period_since_creation_in_days': {'key': 'immutabilityPeriodSinceCreationInDays', 'type': 'int'}, + 'state': {'key': 'state', 'type': 'str'}, + 'allow_protected_append_writes': {'key': 'allowProtectedAppendWrites', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(AccountImmutabilityPolicyProperties, self).__init__(**kwargs) + self.immutability_period_since_creation_in_days = kwargs.get('immutability_period_since_creation_in_days', None) + self.state = kwargs.get('state', None) + self.allow_protected_append_writes = kwargs.get('allow_protected_append_writes', None) + + +class AccountSasParameters(msrest.serialization.Model): + """The parameters to list SAS credentials of a storage account. + + All required parameters must be populated in order to send to Azure. + + :param services: Required. The signed services accessible with the account SAS. Possible values + include: Blob (b), Queue (q), Table (t), File (f). Possible values include: "b", "q", "t", "f". + :type services: str or ~azure.mgmt.storage.v2021_06_01.models.Services + :param resource_types: Required. The signed resource types that are accessible with the account + SAS. Service (s): Access to service-level APIs; Container (c): Access to container-level APIs; + Object (o): Access to object-level APIs for blobs, queue messages, table entities, and files. + Possible values include: "s", "c", "o". + :type resource_types: str or ~azure.mgmt.storage.v2021_06_01.models.SignedResourceTypes + :param permissions: Required. The signed permissions for the account SAS. Possible values + include: Read (r), Write (w), Delete (d), List (l), Add (a), Create (c), Update (u) and Process + (p). Possible values include: "r", "d", "w", "l", "a", "c", "u", "p". + :type permissions: str or ~azure.mgmt.storage.v2021_06_01.models.Permissions + :param ip_address_or_range: An IP address or a range of IP addresses from which to accept + requests. + :type ip_address_or_range: str + :param protocols: The protocol permitted for a request made with the account SAS. Possible + values include: "https,http", "https". + :type protocols: str or ~azure.mgmt.storage.v2021_06_01.models.HttpProtocol + :param shared_access_start_time: The time at which the SAS becomes valid. + :type shared_access_start_time: ~datetime.datetime + :param shared_access_expiry_time: Required. The time at which the shared access signature + becomes invalid. + :type shared_access_expiry_time: ~datetime.datetime + :param key_to_sign: The key to sign the account SAS token with. 
+ :type key_to_sign: str + """ + + _validation = { + 'services': {'required': True}, + 'resource_types': {'required': True}, + 'permissions': {'required': True}, + 'shared_access_expiry_time': {'required': True}, + } + + _attribute_map = { + 'services': {'key': 'signedServices', 'type': 'str'}, + 'resource_types': {'key': 'signedResourceTypes', 'type': 'str'}, + 'permissions': {'key': 'signedPermission', 'type': 'str'}, + 'ip_address_or_range': {'key': 'signedIp', 'type': 'str'}, + 'protocols': {'key': 'signedProtocol', 'type': 'str'}, + 'shared_access_start_time': {'key': 'signedStart', 'type': 'iso-8601'}, + 'shared_access_expiry_time': {'key': 'signedExpiry', 'type': 'iso-8601'}, + 'key_to_sign': {'key': 'keyToSign', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AccountSasParameters, self).__init__(**kwargs) + self.services = kwargs['services'] + self.resource_types = kwargs['resource_types'] + self.permissions = kwargs['permissions'] + self.ip_address_or_range = kwargs.get('ip_address_or_range', None) + self.protocols = kwargs.get('protocols', None) + self.shared_access_start_time = kwargs.get('shared_access_start_time', None) + self.shared_access_expiry_time = kwargs['shared_access_expiry_time'] + self.key_to_sign = kwargs.get('key_to_sign', None) + + +class ActiveDirectoryProperties(msrest.serialization.Model): + """Settings properties for Active Directory (AD). + + All required parameters must be populated in order to send to Azure. + + :param domain_name: Required. Specifies the primary domain that the AD DNS server is + authoritative for. + :type domain_name: str + :param net_bios_domain_name: Required. Specifies the NetBIOS domain name. + :type net_bios_domain_name: str + :param forest_name: Required. Specifies the Active Directory forest to get. + :type forest_name: str + :param domain_guid: Required. Specifies the domain GUID. + :type domain_guid: str + :param domain_sid: Required. Specifies the security identifier (SID). + :type domain_sid: str + :param azure_storage_sid: Required. Specifies the security identifier (SID) for Azure Storage. + :type azure_storage_sid: str + """ + + _validation = { + 'domain_name': {'required': True}, + 'net_bios_domain_name': {'required': True}, + 'forest_name': {'required': True}, + 'domain_guid': {'required': True}, + 'domain_sid': {'required': True}, + 'azure_storage_sid': {'required': True}, + } + + _attribute_map = { + 'domain_name': {'key': 'domainName', 'type': 'str'}, + 'net_bios_domain_name': {'key': 'netBiosDomainName', 'type': 'str'}, + 'forest_name': {'key': 'forestName', 'type': 'str'}, + 'domain_guid': {'key': 'domainGuid', 'type': 'str'}, + 'domain_sid': {'key': 'domainSid', 'type': 'str'}, + 'azure_storage_sid': {'key': 'azureStorageSid', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ActiveDirectoryProperties, self).__init__(**kwargs) + self.domain_name = kwargs['domain_name'] + self.net_bios_domain_name = kwargs['net_bios_domain_name'] + self.forest_name = kwargs['forest_name'] + self.domain_guid = kwargs['domain_guid'] + self.domain_sid = kwargs['domain_sid'] + self.azure_storage_sid = kwargs['azure_storage_sid'] + + +class Resource(msrest.serialization.Model): + """Common fields that are returned in the response for all Azure Resource Manager resources. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. 
Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Resource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + + +class AzureEntityResource(Resource): + """The resource model definition for an Azure Resource Manager resource with an etag. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureEntityResource, self).__init__(**kwargs) + self.etag = None + + +class AzureFilesIdentityBasedAuthentication(msrest.serialization.Model): + """Settings for Azure Files identity based authentication. + + All required parameters must be populated in order to send to Azure. + + :param directory_service_options: Required. Indicates the directory service used. Possible + values include: "None", "AADDS", "AD". + :type directory_service_options: str or + ~azure.mgmt.storage.v2021_06_01.models.DirectoryServiceOptions + :param active_directory_properties: Required if choose AD. + :type active_directory_properties: + ~azure.mgmt.storage.v2021_06_01.models.ActiveDirectoryProperties + :param default_share_permission: Default share permission for users using Kerberos + authentication if RBAC role is not assigned. Possible values include: "None", + "StorageFileDataSmbShareReader", "StorageFileDataSmbShareContributor", + "StorageFileDataSmbShareElevatedContributor". 
+ :type default_share_permission: str or + ~azure.mgmt.storage.v2021_06_01.models.DefaultSharePermission + """ + + _validation = { + 'directory_service_options': {'required': True}, + } + + _attribute_map = { + 'directory_service_options': {'key': 'directoryServiceOptions', 'type': 'str'}, + 'active_directory_properties': {'key': 'activeDirectoryProperties', 'type': 'ActiveDirectoryProperties'}, + 'default_share_permission': {'key': 'defaultSharePermission', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureFilesIdentityBasedAuthentication, self).__init__(**kwargs) + self.directory_service_options = kwargs['directory_service_options'] + self.active_directory_properties = kwargs.get('active_directory_properties', None) + self.default_share_permission = kwargs.get('default_share_permission', None) + + +class BlobContainer(AzureEntityResource): + """Properties of the blob container, including Id, resource name, resource type, Etag. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :ivar version: The version of the deleted blob container. + :vartype version: str + :ivar deleted: Indicates whether the blob container was deleted. + :vartype deleted: bool + :ivar deleted_time: Blob container deletion time. + :vartype deleted_time: ~datetime.datetime + :ivar remaining_retention_days: Remaining retention days for soft deleted blob container. + :vartype remaining_retention_days: int + :param default_encryption_scope: Default the container to use specified encryption scope for + all writes. + :type default_encryption_scope: str + :param deny_encryption_scope_override: Block override of encryption scope from the container + default. + :type deny_encryption_scope_override: bool + :param public_access: Specifies whether data in the container may be accessed publicly and the + level of access. Possible values include: "Container", "Blob", "None". + :type public_access: str or ~azure.mgmt.storage.v2021_06_01.models.PublicAccess + :ivar last_modified_time: Returns the date and time the container was last modified. + :vartype last_modified_time: ~datetime.datetime + :ivar lease_status: The lease status of the container. Possible values include: "Locked", + "Unlocked". + :vartype lease_status: str or ~azure.mgmt.storage.v2021_06_01.models.LeaseStatus + :ivar lease_state: Lease state of the container. Possible values include: "Available", + "Leased", "Expired", "Breaking", "Broken". + :vartype lease_state: str or ~azure.mgmt.storage.v2021_06_01.models.LeaseState + :ivar lease_duration: Specifies whether the lease on a container is of infinite or fixed + duration, only when the container is leased. Possible values include: "Infinite", "Fixed". + :vartype lease_duration: str or ~azure.mgmt.storage.v2021_06_01.models.LeaseDuration + :param metadata: A name-value pair to associate with the container as metadata. + :type metadata: dict[str, str] + :ivar immutability_policy: The ImmutabilityPolicy property of the container. 
+ :vartype immutability_policy: + ~azure.mgmt.storage.v2021_06_01.models.ImmutabilityPolicyProperties + :ivar legal_hold: The LegalHold property of the container. + :vartype legal_hold: ~azure.mgmt.storage.v2021_06_01.models.LegalHoldProperties + :ivar has_legal_hold: The hasLegalHold public property is set to true by SRP if there are at + least one existing tag. The hasLegalHold public property is set to false by SRP if all existing + legal hold tags are cleared out. There can be a maximum of 1000 blob containers with + hasLegalHold=true for a given account. + :vartype has_legal_hold: bool + :ivar has_immutability_policy: The hasImmutabilityPolicy public property is set to true by SRP + if ImmutabilityPolicy has been created for this container. The hasImmutabilityPolicy public + property is set to false by SRP if ImmutabilityPolicy has not been created for this container. + :vartype has_immutability_policy: bool + :param immutable_storage_with_versioning: The object level immutability property of the + container. The property is immutable and can only be set to true at the container creation + time. Existing containers must undergo a migration process. + :type immutable_storage_with_versioning: + ~azure.mgmt.storage.v2021_06_01.models.ImmutableStorageWithVersioning + :param enable_nfs_v3_root_squash: Enable NFSv3 root squash on blob container. + :type enable_nfs_v3_root_squash: bool + :param enable_nfs_v3_all_squash: Enable NFSv3 all squash on blob container. + :type enable_nfs_v3_all_squash: bool + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'version': {'readonly': True}, + 'deleted': {'readonly': True}, + 'deleted_time': {'readonly': True}, + 'remaining_retention_days': {'readonly': True}, + 'last_modified_time': {'readonly': True}, + 'lease_status': {'readonly': True}, + 'lease_state': {'readonly': True}, + 'lease_duration': {'readonly': True}, + 'immutability_policy': {'readonly': True}, + 'legal_hold': {'readonly': True}, + 'has_legal_hold': {'readonly': True}, + 'has_immutability_policy': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'version': {'key': 'properties.version', 'type': 'str'}, + 'deleted': {'key': 'properties.deleted', 'type': 'bool'}, + 'deleted_time': {'key': 'properties.deletedTime', 'type': 'iso-8601'}, + 'remaining_retention_days': {'key': 'properties.remainingRetentionDays', 'type': 'int'}, + 'default_encryption_scope': {'key': 'properties.defaultEncryptionScope', 'type': 'str'}, + 'deny_encryption_scope_override': {'key': 'properties.denyEncryptionScopeOverride', 'type': 'bool'}, + 'public_access': {'key': 'properties.publicAccess', 'type': 'str'}, + 'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'iso-8601'}, + 'lease_status': {'key': 'properties.leaseStatus', 'type': 'str'}, + 'lease_state': {'key': 'properties.leaseState', 'type': 'str'}, + 'lease_duration': {'key': 'properties.leaseDuration', 'type': 'str'}, + 'metadata': {'key': 'properties.metadata', 'type': '{str}'}, + 'immutability_policy': {'key': 'properties.immutabilityPolicy', 'type': 'ImmutabilityPolicyProperties'}, + 'legal_hold': {'key': 'properties.legalHold', 'type': 'LegalHoldProperties'}, + 'has_legal_hold': {'key': 'properties.hasLegalHold', 'type': 'bool'}, + 'has_immutability_policy': {'key': 
'properties.hasImmutabilityPolicy', 'type': 'bool'}, + 'immutable_storage_with_versioning': {'key': 'properties.immutableStorageWithVersioning', 'type': 'ImmutableStorageWithVersioning'}, + 'enable_nfs_v3_root_squash': {'key': 'properties.enableNfsV3RootSquash', 'type': 'bool'}, + 'enable_nfs_v3_all_squash': {'key': 'properties.enableNfsV3AllSquash', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(BlobContainer, self).__init__(**kwargs) + self.version = None + self.deleted = None + self.deleted_time = None + self.remaining_retention_days = None + self.default_encryption_scope = kwargs.get('default_encryption_scope', None) + self.deny_encryption_scope_override = kwargs.get('deny_encryption_scope_override', None) + self.public_access = kwargs.get('public_access', None) + self.last_modified_time = None + self.lease_status = None + self.lease_state = None + self.lease_duration = None + self.metadata = kwargs.get('metadata', None) + self.immutability_policy = None + self.legal_hold = None + self.has_legal_hold = None + self.has_immutability_policy = None + self.immutable_storage_with_versioning = kwargs.get('immutable_storage_with_versioning', None) + self.enable_nfs_v3_root_squash = kwargs.get('enable_nfs_v3_root_squash', None) + self.enable_nfs_v3_all_squash = kwargs.get('enable_nfs_v3_all_squash', None) + + +class BlobInventoryPolicy(Resource): + """The storage account blob inventory policy. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~azure.mgmt.storage.v2021_06_01.models.SystemData + :ivar last_modified_time: Returns the last modified date and time of the blob inventory policy. + :vartype last_modified_time: ~datetime.datetime + :param policy: The storage account blob inventory policy object. It is composed of policy + rules. + :type policy: ~azure.mgmt.storage.v2021_06_01.models.BlobInventoryPolicySchema + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'last_modified_time': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'iso-8601'}, + 'policy': {'key': 'properties.policy', 'type': 'BlobInventoryPolicySchema'}, + } + + def __init__( + self, + **kwargs + ): + super(BlobInventoryPolicy, self).__init__(**kwargs) + self.system_data = None + self.last_modified_time = None + self.policy = kwargs.get('policy', None) + + +class BlobInventoryPolicyDefinition(msrest.serialization.Model): + """An object that defines the blob inventory rule. + + All required parameters must be populated in order to send to Azure. + + :param filters: An object that defines the filter set. 
+ :type filters: ~azure.mgmt.storage.v2021_06_01.models.BlobInventoryPolicyFilter + :param format: Required. This is a required field, it specifies the format for the inventory + files. Possible values include: "Csv", "Parquet". + :type format: str or ~azure.mgmt.storage.v2021_06_01.models.Format + :param schedule: Required. This is a required field. This field is used to schedule an + inventory formation. Possible values include: "Daily", "Weekly". + :type schedule: str or ~azure.mgmt.storage.v2021_06_01.models.Schedule + :param object_type: Required. This is a required field. This field specifies the scope of the + inventory created either at the blob or container level. Possible values include: "Blob", + "Container". + :type object_type: str or ~azure.mgmt.storage.v2021_06_01.models.ObjectType + :param schema_fields: Required. This is a required field. This field specifies the fields and + properties of the object to be included in the inventory. The Schema field value 'Name' is + always required. The valid values for this field for the 'Blob' definition.objectType include + 'Name, Creation-Time, Last-Modified, Content-Length, Content-MD5, BlobType, AccessTier, + AccessTierChangeTime, AccessTierInferred, Tags, Expiry-Time, hdi_isfolder, Owner, Group, + Permissions, Acl, Snapshot, VersionId, IsCurrentVersion, Metadata, LastAccessTime'. The valid + values for 'Container' definition.objectType include 'Name, Last-Modified, Metadata, + LeaseStatus, LeaseState, LeaseDuration, PublicAccess, HasImmutabilityPolicy, HasLegalHold'. + Schema field values 'Expiry-Time, hdi_isfolder, Owner, Group, Permissions, Acl' are valid only + for Hns enabled accounts.'Tags' field is only valid for non Hns accounts. + :type schema_fields: list[str] + """ + + _validation = { + 'format': {'required': True}, + 'schedule': {'required': True}, + 'object_type': {'required': True}, + 'schema_fields': {'required': True}, + } + + _attribute_map = { + 'filters': {'key': 'filters', 'type': 'BlobInventoryPolicyFilter'}, + 'format': {'key': 'format', 'type': 'str'}, + 'schedule': {'key': 'schedule', 'type': 'str'}, + 'object_type': {'key': 'objectType', 'type': 'str'}, + 'schema_fields': {'key': 'schemaFields', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(BlobInventoryPolicyDefinition, self).__init__(**kwargs) + self.filters = kwargs.get('filters', None) + self.format = kwargs['format'] + self.schedule = kwargs['schedule'] + self.object_type = kwargs['object_type'] + self.schema_fields = kwargs['schema_fields'] + + +class BlobInventoryPolicyFilter(msrest.serialization.Model): + """An object that defines the blob inventory rule filter conditions. For 'Blob' definition.objectType all filter properties are applicable, 'blobTypes' is required and others are optional. For 'Container' definition.objectType only prefixMatch is applicable and is optional. + + :param prefix_match: An array of strings for blob prefixes to be matched. + :type prefix_match: list[str] + :param blob_types: An array of predefined enum values. Valid values include blockBlob, + appendBlob, pageBlob. Hns accounts does not support pageBlobs. This field is required when + definition.objectType property is set to 'Blob'. + :type blob_types: list[str] + :param include_blob_versions: Includes blob versions in blob inventory when value is set to + true. The definition.schemaFields values 'VersionId and IsCurrentVersion' are required if this + property is set to true, else they must be excluded. 
+ :type include_blob_versions: bool + :param include_snapshots: Includes blob snapshots in blob inventory when value is set to true. + The definition.schemaFields value 'Snapshot' is required if this property is set to true, else + it must be excluded. + :type include_snapshots: bool + """ + + _attribute_map = { + 'prefix_match': {'key': 'prefixMatch', 'type': '[str]'}, + 'blob_types': {'key': 'blobTypes', 'type': '[str]'}, + 'include_blob_versions': {'key': 'includeBlobVersions', 'type': 'bool'}, + 'include_snapshots': {'key': 'includeSnapshots', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(BlobInventoryPolicyFilter, self).__init__(**kwargs) + self.prefix_match = kwargs.get('prefix_match', None) + self.blob_types = kwargs.get('blob_types', None) + self.include_blob_versions = kwargs.get('include_blob_versions', None) + self.include_snapshots = kwargs.get('include_snapshots', None) + + +class BlobInventoryPolicyRule(msrest.serialization.Model): + """An object that wraps the blob inventory rule. Each rule is uniquely defined by name. + + All required parameters must be populated in order to send to Azure. + + :param enabled: Required. Rule is enabled when set to true. + :type enabled: bool + :param name: Required. A rule name can contain any combination of alpha numeric characters. + Rule name is case-sensitive. It must be unique within a policy. + :type name: str + :param destination: Required. Container name where blob inventory files are stored. Must be + pre-created. + :type destination: str + :param definition: Required. An object that defines the blob inventory policy rule. + :type definition: ~azure.mgmt.storage.v2021_06_01.models.BlobInventoryPolicyDefinition + """ + + _validation = { + 'enabled': {'required': True}, + 'name': {'required': True}, + 'destination': {'required': True}, + 'definition': {'required': True}, + } + + _attribute_map = { + 'enabled': {'key': 'enabled', 'type': 'bool'}, + 'name': {'key': 'name', 'type': 'str'}, + 'destination': {'key': 'destination', 'type': 'str'}, + 'definition': {'key': 'definition', 'type': 'BlobInventoryPolicyDefinition'}, + } + + def __init__( + self, + **kwargs + ): + super(BlobInventoryPolicyRule, self).__init__(**kwargs) + self.enabled = kwargs['enabled'] + self.name = kwargs['name'] + self.destination = kwargs['destination'] + self.definition = kwargs['definition'] + + +class BlobInventoryPolicySchema(msrest.serialization.Model): + """The storage account blob inventory policy rules. + + All required parameters must be populated in order to send to Azure. + + :param enabled: Required. Policy is enabled if set to true. + :type enabled: bool + :param type: Required. The valid value is Inventory. Possible values include: "Inventory". + :type type: str or ~azure.mgmt.storage.v2021_06_01.models.InventoryRuleType + :param rules: Required. The storage account blob inventory policy rules. The rule is applied + when it is enabled. 
+ :type rules: list[~azure.mgmt.storage.v2021_06_01.models.BlobInventoryPolicyRule] + """ + + _validation = { + 'enabled': {'required': True}, + 'type': {'required': True}, + 'rules': {'required': True}, + } + + _attribute_map = { + 'enabled': {'key': 'enabled', 'type': 'bool'}, + 'type': {'key': 'type', 'type': 'str'}, + 'rules': {'key': 'rules', 'type': '[BlobInventoryPolicyRule]'}, + } + + def __init__( + self, + **kwargs + ): + super(BlobInventoryPolicySchema, self).__init__(**kwargs) + self.enabled = kwargs['enabled'] + self.type = kwargs['type'] + self.rules = kwargs['rules'] + + +class BlobRestoreParameters(msrest.serialization.Model): + """Blob restore parameters. + + All required parameters must be populated in order to send to Azure. + + :param time_to_restore: Required. Restore blob to the specified time. + :type time_to_restore: ~datetime.datetime + :param blob_ranges: Required. Blob ranges to restore. + :type blob_ranges: list[~azure.mgmt.storage.v2021_06_01.models.BlobRestoreRange] + """ + + _validation = { + 'time_to_restore': {'required': True}, + 'blob_ranges': {'required': True}, + } + + _attribute_map = { + 'time_to_restore': {'key': 'timeToRestore', 'type': 'iso-8601'}, + 'blob_ranges': {'key': 'blobRanges', 'type': '[BlobRestoreRange]'}, + } + + def __init__( + self, + **kwargs + ): + super(BlobRestoreParameters, self).__init__(**kwargs) + self.time_to_restore = kwargs['time_to_restore'] + self.blob_ranges = kwargs['blob_ranges'] + + +class BlobRestoreRange(msrest.serialization.Model): + """Blob range. + + All required parameters must be populated in order to send to Azure. + + :param start_range: Required. Blob start range. This is inclusive. Empty means account start. + :type start_range: str + :param end_range: Required. Blob end range. This is exclusive. Empty means account end. + :type end_range: str + """ + + _validation = { + 'start_range': {'required': True}, + 'end_range': {'required': True}, + } + + _attribute_map = { + 'start_range': {'key': 'startRange', 'type': 'str'}, + 'end_range': {'key': 'endRange', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(BlobRestoreRange, self).__init__(**kwargs) + self.start_range = kwargs['start_range'] + self.end_range = kwargs['end_range'] + + +class BlobRestoreStatus(msrest.serialization.Model): + """Blob restore status. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar status: The status of blob restore progress. Possible values are: - InProgress: Indicates + that blob restore is ongoing. - Complete: Indicates that blob restore has been completed + successfully. - Failed: Indicates that blob restore is failed. Possible values include: + "InProgress", "Complete", "Failed". + :vartype status: str or ~azure.mgmt.storage.v2021_06_01.models.BlobRestoreProgressStatus + :ivar failure_reason: Failure reason when blob restore is failed. + :vartype failure_reason: str + :ivar restore_id: Id for tracking blob restore request. + :vartype restore_id: str + :ivar parameters: Blob restore request parameters. 
+ :vartype parameters: ~azure.mgmt.storage.v2021_06_01.models.BlobRestoreParameters + """ + + _validation = { + 'status': {'readonly': True}, + 'failure_reason': {'readonly': True}, + 'restore_id': {'readonly': True}, + 'parameters': {'readonly': True}, + } + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'failure_reason': {'key': 'failureReason', 'type': 'str'}, + 'restore_id': {'key': 'restoreId', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': 'BlobRestoreParameters'}, + } + + def __init__( + self, + **kwargs + ): + super(BlobRestoreStatus, self).__init__(**kwargs) + self.status = None + self.failure_reason = None + self.restore_id = None + self.parameters = None + + +class BlobServiceItems(msrest.serialization.Model): + """BlobServiceItems. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: List of blob services returned. + :vartype value: list[~azure.mgmt.storage.v2021_06_01.models.BlobServiceProperties] + """ + + _validation = { + 'value': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[BlobServiceProperties]'}, + } + + def __init__( + self, + **kwargs + ): + super(BlobServiceItems, self).__init__(**kwargs) + self.value = None + + +class BlobServiceProperties(Resource): + """The properties of a storage account’s Blob service. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar sku: Sku name and tier. + :vartype sku: ~azure.mgmt.storage.v2021_06_01.models.Sku + :param cors: Specifies CORS rules for the Blob service. You can include up to five CorsRule + elements in the request. If no CorsRule elements are included in the request body, all CORS + rules will be deleted, and CORS will be disabled for the Blob service. + :type cors: ~azure.mgmt.storage.v2021_06_01.models.CorsRules + :param default_service_version: DefaultServiceVersion indicates the default version to use for + requests to the Blob service if an incoming request’s version is not specified. Possible values + include version 2008-10-27 and all more recent versions. + :type default_service_version: str + :param delete_retention_policy: The blob service properties for blob soft delete. + :type delete_retention_policy: ~azure.mgmt.storage.v2021_06_01.models.DeleteRetentionPolicy + :param is_versioning_enabled: Versioning is enabled if set to true. + :type is_versioning_enabled: bool + :param automatic_snapshot_policy_enabled: Deprecated in favor of isVersioningEnabled property. + :type automatic_snapshot_policy_enabled: bool + :param change_feed: The blob service properties for change feed events. + :type change_feed: ~azure.mgmt.storage.v2021_06_01.models.ChangeFeed + :param restore_policy: The blob service properties for blob restore policy. + :type restore_policy: ~azure.mgmt.storage.v2021_06_01.models.RestorePolicyProperties + :param container_delete_retention_policy: The blob service properties for container soft + delete. 
+ :type container_delete_retention_policy: + ~azure.mgmt.storage.v2021_06_01.models.DeleteRetentionPolicy + :param last_access_time_tracking_policy: The blob service property to configure last access + time based tracking policy. + :type last_access_time_tracking_policy: + ~azure.mgmt.storage.v2021_06_01.models.LastAccessTimeTrackingPolicy + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'sku': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'cors': {'key': 'properties.cors', 'type': 'CorsRules'}, + 'default_service_version': {'key': 'properties.defaultServiceVersion', 'type': 'str'}, + 'delete_retention_policy': {'key': 'properties.deleteRetentionPolicy', 'type': 'DeleteRetentionPolicy'}, + 'is_versioning_enabled': {'key': 'properties.isVersioningEnabled', 'type': 'bool'}, + 'automatic_snapshot_policy_enabled': {'key': 'properties.automaticSnapshotPolicyEnabled', 'type': 'bool'}, + 'change_feed': {'key': 'properties.changeFeed', 'type': 'ChangeFeed'}, + 'restore_policy': {'key': 'properties.restorePolicy', 'type': 'RestorePolicyProperties'}, + 'container_delete_retention_policy': {'key': 'properties.containerDeleteRetentionPolicy', 'type': 'DeleteRetentionPolicy'}, + 'last_access_time_tracking_policy': {'key': 'properties.lastAccessTimeTrackingPolicy', 'type': 'LastAccessTimeTrackingPolicy'}, + } + + def __init__( + self, + **kwargs + ): + super(BlobServiceProperties, self).__init__(**kwargs) + self.sku = None + self.cors = kwargs.get('cors', None) + self.default_service_version = kwargs.get('default_service_version', None) + self.delete_retention_policy = kwargs.get('delete_retention_policy', None) + self.is_versioning_enabled = kwargs.get('is_versioning_enabled', None) + self.automatic_snapshot_policy_enabled = kwargs.get('automatic_snapshot_policy_enabled', None) + self.change_feed = kwargs.get('change_feed', None) + self.restore_policy = kwargs.get('restore_policy', None) + self.container_delete_retention_policy = kwargs.get('container_delete_retention_policy', None) + self.last_access_time_tracking_policy = kwargs.get('last_access_time_tracking_policy', None) + + +class ChangeFeed(msrest.serialization.Model): + """The blob service properties for change feed events. + + :param enabled: Indicates whether change feed event logging is enabled for the Blob service. + :type enabled: bool + :param retention_in_days: Indicates the duration of changeFeed retention in days. Minimum value + is 1 day and maximum value is 146000 days (400 years). A null value indicates an infinite + retention of the change feed. + :type retention_in_days: int + """ + + _validation = { + 'retention_in_days': {'maximum': 146000, 'minimum': 1}, + } + + _attribute_map = { + 'enabled': {'key': 'enabled', 'type': 'bool'}, + 'retention_in_days': {'key': 'retentionInDays', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(ChangeFeed, self).__init__(**kwargs) + self.enabled = kwargs.get('enabled', None) + self.retention_in_days = kwargs.get('retention_in_days', None) + + +class CheckNameAvailabilityResult(msrest.serialization.Model): + """The CheckNameAvailability operation response. + + Variables are only populated by the server, and will be ignored when sending a request. 
+ + :ivar name_available: Gets a boolean value that indicates whether the name is available for you + to use. If true, the name is available. If false, the name has already been taken or is invalid + and cannot be used. + :vartype name_available: bool + :ivar reason: Gets the reason that a storage account name could not be used. The Reason element + is only returned if NameAvailable is false. Possible values include: "AccountNameInvalid", + "AlreadyExists". + :vartype reason: str or ~azure.mgmt.storage.v2021_06_01.models.Reason + :ivar message: Gets an error message explaining the Reason value in more detail. + :vartype message: str + """ + + _validation = { + 'name_available': {'readonly': True}, + 'reason': {'readonly': True}, + 'message': {'readonly': True}, + } + + _attribute_map = { + 'name_available': {'key': 'nameAvailable', 'type': 'bool'}, + 'reason': {'key': 'reason', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(CheckNameAvailabilityResult, self).__init__(**kwargs) + self.name_available = None + self.reason = None + self.message = None + + +class CloudErrorAutoGenerated(msrest.serialization.Model): + """An error response from the Storage service. + + :param error: An error response from the Storage service. + :type error: ~azure.mgmt.storage.v2021_06_01.models.CloudErrorBodyAutoGenerated + """ + + _attribute_map = { + 'error': {'key': 'error', 'type': 'CloudErrorBodyAutoGenerated'}, + } + + def __init__( + self, + **kwargs + ): + super(CloudErrorAutoGenerated, self).__init__(**kwargs) + self.error = kwargs.get('error', None) + + +class CloudErrorBody(msrest.serialization.Model): + """An error response from the Storage service. + + :param code: An identifier for the error. Codes are invariant and are intended to be consumed + programmatically. + :type code: str + :param message: A message describing the error, intended to be suitable for display in a user + interface. + :type message: str + :param target: The target of the particular error. For example, the name of the property in + error. + :type target: str + :param details: A list of additional details about the error. + :type details: list[~azure.mgmt.storage.v2021_06_01.models.CloudErrorBody] + """ + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[CloudErrorBody]'}, + } + + def __init__( + self, + **kwargs + ): + super(CloudErrorBody, self).__init__(**kwargs) + self.code = kwargs.get('code', None) + self.message = kwargs.get('message', None) + self.target = kwargs.get('target', None) + self.details = kwargs.get('details', None) + + +class CloudErrorBodyAutoGenerated(msrest.serialization.Model): + """An error response from the Storage service. + + :param code: An identifier for the error. Codes are invariant and are intended to be consumed + programmatically. + :type code: str + :param message: A message describing the error, intended to be suitable for display in a user + interface. + :type message: str + :param target: The target of the particular error. For example, the name of the property in + error. + :type target: str + :param details: A list of additional details about the error. 
+ :type details: list[~azure.mgmt.storage.v2021_06_01.models.CloudErrorBodyAutoGenerated] + """ + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[CloudErrorBodyAutoGenerated]'}, + } + + def __init__( + self, + **kwargs + ): + super(CloudErrorBodyAutoGenerated, self).__init__(**kwargs) + self.code = kwargs.get('code', None) + self.message = kwargs.get('message', None) + self.target = kwargs.get('target', None) + self.details = kwargs.get('details', None) + + +class CorsRule(msrest.serialization.Model): + """Specifies a CORS rule for the Blob service. + + All required parameters must be populated in order to send to Azure. + + :param allowed_origins: Required. Required if CorsRule element is present. A list of origin + domains that will be allowed via CORS, or "*" to allow all domains. + :type allowed_origins: list[str] + :param allowed_methods: Required. Required if CorsRule element is present. A list of HTTP + methods that are allowed to be executed by the origin. + :type allowed_methods: list[str or + ~azure.mgmt.storage.v2021_06_01.models.CorsRuleAllowedMethodsItem] + :param max_age_in_seconds: Required. Required if CorsRule element is present. The number of + seconds that the client/browser should cache a preflight response. + :type max_age_in_seconds: int + :param exposed_headers: Required. Required if CorsRule element is present. A list of response + headers to expose to CORS clients. + :type exposed_headers: list[str] + :param allowed_headers: Required. Required if CorsRule element is present. A list of headers + allowed to be part of the cross-origin request. + :type allowed_headers: list[str] + """ + + _validation = { + 'allowed_origins': {'required': True}, + 'allowed_methods': {'required': True}, + 'max_age_in_seconds': {'required': True}, + 'exposed_headers': {'required': True}, + 'allowed_headers': {'required': True}, + } + + _attribute_map = { + 'allowed_origins': {'key': 'allowedOrigins', 'type': '[str]'}, + 'allowed_methods': {'key': 'allowedMethods', 'type': '[str]'}, + 'max_age_in_seconds': {'key': 'maxAgeInSeconds', 'type': 'int'}, + 'exposed_headers': {'key': 'exposedHeaders', 'type': '[str]'}, + 'allowed_headers': {'key': 'allowedHeaders', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(CorsRule, self).__init__(**kwargs) + self.allowed_origins = kwargs['allowed_origins'] + self.allowed_methods = kwargs['allowed_methods'] + self.max_age_in_seconds = kwargs['max_age_in_seconds'] + self.exposed_headers = kwargs['exposed_headers'] + self.allowed_headers = kwargs['allowed_headers'] + + +class CorsRules(msrest.serialization.Model): + """Sets the CORS rules. You can include up to five CorsRule elements in the request. + + :param cors_rules: The List of CORS rules. You can include up to five CorsRule elements in the + request. + :type cors_rules: list[~azure.mgmt.storage.v2021_06_01.models.CorsRule] + """ + + _attribute_map = { + 'cors_rules': {'key': 'corsRules', 'type': '[CorsRule]'}, + } + + def __init__( + self, + **kwargs + ): + super(CorsRules, self).__init__(**kwargs) + self.cors_rules = kwargs.get('cors_rules', None) + + +class CustomDomain(msrest.serialization.Model): + """The custom domain assigned to this storage account. This can be set via Update. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. 
Gets or sets the custom domain name assigned to the storage account. + Name is the CNAME source. + :type name: str + :param use_sub_domain_name: Indicates whether indirect CName validation is enabled. Default + value is false. This should only be set on updates. + :type use_sub_domain_name: bool + """ + + _validation = { + 'name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'use_sub_domain_name': {'key': 'useSubDomainName', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(CustomDomain, self).__init__(**kwargs) + self.name = kwargs['name'] + self.use_sub_domain_name = kwargs.get('use_sub_domain_name', None) + + +class DateAfterCreation(msrest.serialization.Model): + """Object to define the number of days after creation. + + All required parameters must be populated in order to send to Azure. + + :param days_after_creation_greater_than: Required. Value indicating the age in days after + creation. + :type days_after_creation_greater_than: float + """ + + _validation = { + 'days_after_creation_greater_than': {'required': True, 'minimum': 0, 'multiple': 1}, + } + + _attribute_map = { + 'days_after_creation_greater_than': {'key': 'daysAfterCreationGreaterThan', 'type': 'float'}, + } + + def __init__( + self, + **kwargs + ): + super(DateAfterCreation, self).__init__(**kwargs) + self.days_after_creation_greater_than = kwargs['days_after_creation_greater_than'] + + +class DateAfterModification(msrest.serialization.Model): + """Object to define the number of days after object last modification Or last access. Properties daysAfterModificationGreaterThan and daysAfterLastAccessTimeGreaterThan are mutually exclusive. + + :param days_after_modification_greater_than: Value indicating the age in days after last + modification. + :type days_after_modification_greater_than: float + :param days_after_last_access_time_greater_than: Value indicating the age in days after last + blob access. This property can only be used in conjunction with last access time tracking + policy. + :type days_after_last_access_time_greater_than: float + """ + + _validation = { + 'days_after_modification_greater_than': {'minimum': 0, 'multiple': 1}, + 'days_after_last_access_time_greater_than': {'minimum': 0, 'multiple': 1}, + } + + _attribute_map = { + 'days_after_modification_greater_than': {'key': 'daysAfterModificationGreaterThan', 'type': 'float'}, + 'days_after_last_access_time_greater_than': {'key': 'daysAfterLastAccessTimeGreaterThan', 'type': 'float'}, + } + + def __init__( + self, + **kwargs + ): + super(DateAfterModification, self).__init__(**kwargs) + self.days_after_modification_greater_than = kwargs.get('days_after_modification_greater_than', None) + self.days_after_last_access_time_greater_than = kwargs.get('days_after_last_access_time_greater_than', None) + + +class ProxyResource(Resource): + """The resource model definition for a Azure Resource Manager proxy resource. It will not have tags and a location. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". 
+ :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ProxyResource, self).__init__(**kwargs) + + +class DeletedAccount(ProxyResource): + """Deleted storage account. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar storage_account_resource_id: Full resource id of the original storage account. + :vartype storage_account_resource_id: str + :ivar location: Location of the deleted account. + :vartype location: str + :ivar restore_reference: Can be used to attempt recovering this deleted account via + PutStorageAccount API. + :vartype restore_reference: str + :ivar creation_time: Creation time of the deleted account. + :vartype creation_time: str + :ivar deletion_time: Deletion time of the deleted account. + :vartype deletion_time: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'storage_account_resource_id': {'readonly': True}, + 'location': {'readonly': True}, + 'restore_reference': {'readonly': True}, + 'creation_time': {'readonly': True}, + 'deletion_time': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'storage_account_resource_id': {'key': 'properties.storageAccountResourceId', 'type': 'str'}, + 'location': {'key': 'properties.location', 'type': 'str'}, + 'restore_reference': {'key': 'properties.restoreReference', 'type': 'str'}, + 'creation_time': {'key': 'properties.creationTime', 'type': 'str'}, + 'deletion_time': {'key': 'properties.deletionTime', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DeletedAccount, self).__init__(**kwargs) + self.storage_account_resource_id = None + self.location = None + self.restore_reference = None + self.creation_time = None + self.deletion_time = None + + +class DeletedAccountListResult(msrest.serialization.Model): + """The response from the List Deleted Accounts operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: Gets the list of deleted accounts and their properties. + :vartype value: list[~azure.mgmt.storage.v2021_06_01.models.DeletedAccount] + :ivar next_link: Request URL that can be used to query next page of deleted accounts. Returned + when total number of requested deleted accounts exceed maximum page size. 
+ :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[DeletedAccount]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DeletedAccountListResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class DeletedShare(msrest.serialization.Model): + """The deleted share to be restored. + + All required parameters must be populated in order to send to Azure. + + :param deleted_share_name: Required. Required. Identify the name of the deleted share that will + be restored. + :type deleted_share_name: str + :param deleted_share_version: Required. Required. Identify the version of the deleted share + that will be restored. + :type deleted_share_version: str + """ + + _validation = { + 'deleted_share_name': {'required': True}, + 'deleted_share_version': {'required': True}, + } + + _attribute_map = { + 'deleted_share_name': {'key': 'deletedShareName', 'type': 'str'}, + 'deleted_share_version': {'key': 'deletedShareVersion', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DeletedShare, self).__init__(**kwargs) + self.deleted_share_name = kwargs['deleted_share_name'] + self.deleted_share_version = kwargs['deleted_share_version'] + + +class DeleteRetentionPolicy(msrest.serialization.Model): + """The service properties for soft delete. + + :param enabled: Indicates whether DeleteRetentionPolicy is enabled. + :type enabled: bool + :param days: Indicates the number of days that the deleted item should be retained. The minimum + specified value can be 1 and the maximum value can be 365. + :type days: int + """ + + _validation = { + 'days': {'maximum': 365, 'minimum': 1}, + } + + _attribute_map = { + 'enabled': {'key': 'enabled', 'type': 'bool'}, + 'days': {'key': 'days', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(DeleteRetentionPolicy, self).__init__(**kwargs) + self.enabled = kwargs.get('enabled', None) + self.days = kwargs.get('days', None) + + +class Dimension(msrest.serialization.Model): + """Dimension of blobs, possibly be blob type or access tier. + + :param name: Display name of dimension. + :type name: str + :param display_name: Display name of dimension. + :type display_name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Dimension, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.display_name = kwargs.get('display_name', None) + + +class Encryption(msrest.serialization.Model): + """The encryption settings on the storage account. + + All required parameters must be populated in order to send to Azure. + + :param services: List of services which support encryption. + :type services: ~azure.mgmt.storage.v2021_06_01.models.EncryptionServices + :param key_source: Required. The encryption keySource (provider). Possible values + (case-insensitive): Microsoft.Storage, Microsoft.Keyvault. Possible values include: + "Microsoft.Storage", "Microsoft.Keyvault". Default value: "Microsoft.Storage". + :type key_source: str or ~azure.mgmt.storage.v2021_06_01.models.KeySource + :param require_infrastructure_encryption: A boolean indicating whether or not the service + applies a secondary layer of encryption with platform managed keys for data at rest. 
+ :type require_infrastructure_encryption: bool + :param key_vault_properties: Properties provided by key vault. + :type key_vault_properties: ~azure.mgmt.storage.v2021_06_01.models.KeyVaultProperties + :param encryption_identity: The identity to be used with service-side encryption at rest. + :type encryption_identity: ~azure.mgmt.storage.v2021_06_01.models.EncryptionIdentity + """ + + _validation = { + 'key_source': {'required': True}, + } + + _attribute_map = { + 'services': {'key': 'services', 'type': 'EncryptionServices'}, + 'key_source': {'key': 'keySource', 'type': 'str'}, + 'require_infrastructure_encryption': {'key': 'requireInfrastructureEncryption', 'type': 'bool'}, + 'key_vault_properties': {'key': 'keyvaultproperties', 'type': 'KeyVaultProperties'}, + 'encryption_identity': {'key': 'identity', 'type': 'EncryptionIdentity'}, + } + + def __init__( + self, + **kwargs + ): + super(Encryption, self).__init__(**kwargs) + self.services = kwargs.get('services', None) + self.key_source = kwargs.get('key_source', "Microsoft.Storage") + self.require_infrastructure_encryption = kwargs.get('require_infrastructure_encryption', None) + self.key_vault_properties = kwargs.get('key_vault_properties', None) + self.encryption_identity = kwargs.get('encryption_identity', None) + + +class EncryptionIdentity(msrest.serialization.Model): + """Encryption identity for the storage account. + + :param encryption_user_assigned_identity: Resource identifier of the UserAssigned identity to + be associated with server-side encryption on the storage account. + :type encryption_user_assigned_identity: str + """ + + _attribute_map = { + 'encryption_user_assigned_identity': {'key': 'userAssignedIdentity', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(EncryptionIdentity, self).__init__(**kwargs) + self.encryption_user_assigned_identity = kwargs.get('encryption_user_assigned_identity', None) + + +class EncryptionScope(Resource): + """The Encryption Scope resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :param source: The provider for the encryption scope. Possible values (case-insensitive): + Microsoft.Storage, Microsoft.KeyVault. Possible values include: "Microsoft.Storage", + "Microsoft.KeyVault". + :type source: str or ~azure.mgmt.storage.v2021_06_01.models.EncryptionScopeSource + :param state: The state of the encryption scope. Possible values (case-insensitive): Enabled, + Disabled. Possible values include: "Enabled", "Disabled". + :type state: str or ~azure.mgmt.storage.v2021_06_01.models.EncryptionScopeState + :ivar creation_time: Gets the creation date and time of the encryption scope in UTC. + :vartype creation_time: ~datetime.datetime + :ivar last_modified_time: Gets the last modification date and time of the encryption scope in + UTC. + :vartype last_modified_time: ~datetime.datetime + :param key_vault_properties: The key vault properties for the encryption scope. This is a + required field if encryption scope 'source' attribute is set to 'Microsoft.KeyVault'. 
+ :type key_vault_properties: + ~azure.mgmt.storage.v2021_06_01.models.EncryptionScopeKeyVaultProperties + :param require_infrastructure_encryption: A boolean indicating whether or not the service + applies a secondary layer of encryption with platform managed keys for data at rest. + :type require_infrastructure_encryption: bool + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'creation_time': {'readonly': True}, + 'last_modified_time': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source': {'key': 'properties.source', 'type': 'str'}, + 'state': {'key': 'properties.state', 'type': 'str'}, + 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'}, + 'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'iso-8601'}, + 'key_vault_properties': {'key': 'properties.keyVaultProperties', 'type': 'EncryptionScopeKeyVaultProperties'}, + 'require_infrastructure_encryption': {'key': 'properties.requireInfrastructureEncryption', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(EncryptionScope, self).__init__(**kwargs) + self.source = kwargs.get('source', None) + self.state = kwargs.get('state', None) + self.creation_time = None + self.last_modified_time = None + self.key_vault_properties = kwargs.get('key_vault_properties', None) + self.require_infrastructure_encryption = kwargs.get('require_infrastructure_encryption', None) + + +class EncryptionScopeKeyVaultProperties(msrest.serialization.Model): + """The key vault properties for the encryption scope. This is a required field if encryption scope 'source' attribute is set to 'Microsoft.KeyVault'. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param key_uri: The object identifier for a key vault key object. When applied, the encryption + scope will use the key referenced by the identifier to enable customer-managed key support on + this encryption scope. + :type key_uri: str + :ivar current_versioned_key_identifier: The object identifier of the current versioned Key + Vault Key in use. + :vartype current_versioned_key_identifier: str + :ivar last_key_rotation_timestamp: Timestamp of last rotation of the Key Vault Key. + :vartype last_key_rotation_timestamp: ~datetime.datetime + """ + + _validation = { + 'current_versioned_key_identifier': {'readonly': True}, + 'last_key_rotation_timestamp': {'readonly': True}, + } + + _attribute_map = { + 'key_uri': {'key': 'keyUri', 'type': 'str'}, + 'current_versioned_key_identifier': {'key': 'currentVersionedKeyIdentifier', 'type': 'str'}, + 'last_key_rotation_timestamp': {'key': 'lastKeyRotationTimestamp', 'type': 'iso-8601'}, + } + + def __init__( + self, + **kwargs + ): + super(EncryptionScopeKeyVaultProperties, self).__init__(**kwargs) + self.key_uri = kwargs.get('key_uri', None) + self.current_versioned_key_identifier = None + self.last_key_rotation_timestamp = None + + +class EncryptionScopeListResult(msrest.serialization.Model): + """List of encryption scopes requested, and if paging is required, a URL to the next page of encryption scopes. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: List of encryption scopes requested. 
+ :vartype value: list[~azure.mgmt.storage.v2021_06_01.models.EncryptionScope] + :ivar next_link: Request URL that can be used to query next page of encryption scopes. Returned + when total number of requested encryption scopes exceeds the maximum page size. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[EncryptionScope]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(EncryptionScopeListResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class EncryptionService(msrest.serialization.Model): + """A service that allows server-side encryption to be used. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param enabled: A boolean indicating whether or not the service encrypts the data as it is + stored. + :type enabled: bool + :ivar last_enabled_time: Gets a rough estimate of the date/time when the encryption was last + enabled by the user. Only returned when encryption is enabled. There might be some unencrypted + blobs which were written after this time, as it is just a rough estimate. + :vartype last_enabled_time: ~datetime.datetime + :param key_type: Encryption key type to be used for the encryption service. 'Account' key type + implies that an account-scoped encryption key will be used. 'Service' key type implies that a + default service key is used. Possible values include: "Service", "Account". + :type key_type: str or ~azure.mgmt.storage.v2021_06_01.models.KeyType + """ + + _validation = { + 'last_enabled_time': {'readonly': True}, + } + + _attribute_map = { + 'enabled': {'key': 'enabled', 'type': 'bool'}, + 'last_enabled_time': {'key': 'lastEnabledTime', 'type': 'iso-8601'}, + 'key_type': {'key': 'keyType', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(EncryptionService, self).__init__(**kwargs) + self.enabled = kwargs.get('enabled', None) + self.last_enabled_time = None + self.key_type = kwargs.get('key_type', None) + + +class EncryptionServices(msrest.serialization.Model): + """A list of services that support encryption. + + :param blob: The encryption function of the blob storage service. + :type blob: ~azure.mgmt.storage.v2021_06_01.models.EncryptionService + :param file: The encryption function of the file storage service. + :type file: ~azure.mgmt.storage.v2021_06_01.models.EncryptionService + :param table: The encryption function of the table storage service. + :type table: ~azure.mgmt.storage.v2021_06_01.models.EncryptionService + :param queue: The encryption function of the queue storage service. + :type queue: ~azure.mgmt.storage.v2021_06_01.models.EncryptionService + """ + + _attribute_map = { + 'blob': {'key': 'blob', 'type': 'EncryptionService'}, + 'file': {'key': 'file', 'type': 'EncryptionService'}, + 'table': {'key': 'table', 'type': 'EncryptionService'}, + 'queue': {'key': 'queue', 'type': 'EncryptionService'}, + } + + def __init__( + self, + **kwargs + ): + super(EncryptionServices, self).__init__(**kwargs) + self.blob = kwargs.get('blob', None) + self.file = kwargs.get('file', None) + self.table = kwargs.get('table', None) + self.queue = kwargs.get('queue', None) + + +class Endpoints(msrest.serialization.Model): + """The URIs that are used to perform a retrieval of a public blob, queue, table, web or dfs object. 
+ + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar blob: Gets the blob endpoint. + :vartype blob: str + :ivar queue: Gets the queue endpoint. + :vartype queue: str + :ivar table: Gets the table endpoint. + :vartype table: str + :ivar file: Gets the file endpoint. + :vartype file: str + :ivar web: Gets the web endpoint. + :vartype web: str + :ivar dfs: Gets the dfs endpoint. + :vartype dfs: str + :param microsoft_endpoints: Gets the microsoft routing storage endpoints. + :type microsoft_endpoints: + ~azure.mgmt.storage.v2021_06_01.models.StorageAccountMicrosoftEndpoints + :param internet_endpoints: Gets the internet routing storage endpoints. + :type internet_endpoints: + ~azure.mgmt.storage.v2021_06_01.models.StorageAccountInternetEndpoints + """ + + _validation = { + 'blob': {'readonly': True}, + 'queue': {'readonly': True}, + 'table': {'readonly': True}, + 'file': {'readonly': True}, + 'web': {'readonly': True}, + 'dfs': {'readonly': True}, + } + + _attribute_map = { + 'blob': {'key': 'blob', 'type': 'str'}, + 'queue': {'key': 'queue', 'type': 'str'}, + 'table': {'key': 'table', 'type': 'str'}, + 'file': {'key': 'file', 'type': 'str'}, + 'web': {'key': 'web', 'type': 'str'}, + 'dfs': {'key': 'dfs', 'type': 'str'}, + 'microsoft_endpoints': {'key': 'microsoftEndpoints', 'type': 'StorageAccountMicrosoftEndpoints'}, + 'internet_endpoints': {'key': 'internetEndpoints', 'type': 'StorageAccountInternetEndpoints'}, + } + + def __init__( + self, + **kwargs + ): + super(Endpoints, self).__init__(**kwargs) + self.blob = None + self.queue = None + self.table = None + self.file = None + self.web = None + self.dfs = None + self.microsoft_endpoints = kwargs.get('microsoft_endpoints', None) + self.internet_endpoints = kwargs.get('internet_endpoints', None) + + +class ErrorResponse(msrest.serialization.Model): + """An error response from the storage resource provider. + + :param error: Azure Storage Resource Provider error response body. + :type error: ~azure.mgmt.storage.v2021_06_01.models.ErrorResponseBody + """ + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorResponseBody'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorResponse, self).__init__(**kwargs) + self.error = kwargs.get('error', None) + + +class ErrorResponseBody(msrest.serialization.Model): + """Error response body contract. + + :param code: An identifier for the error. Codes are invariant and are intended to be consumed + programmatically. + :type code: str + :param message: A message describing the error, intended to be suitable for display in a user + interface. + :type message: str + """ + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorResponseBody, self).__init__(**kwargs) + self.code = kwargs.get('code', None) + self.message = kwargs.get('message', None) + + +class ExtendedLocation(msrest.serialization.Model): + """The complex type of the extended location. + + :param name: The name of the extended location. + :type name: str + :param type: The type of the extended location. Possible values include: "EdgeZone". 
+ :type type: str or ~azure.mgmt.storage.v2021_06_01.models.ExtendedLocationTypes + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ExtendedLocation, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.type = kwargs.get('type', None) + + +class FileServiceItems(msrest.serialization.Model): + """FileServiceItems. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: List of file services returned. + :vartype value: list[~azure.mgmt.storage.v2021_06_01.models.FileServiceProperties] + """ + + _validation = { + 'value': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[FileServiceProperties]'}, + } + + def __init__( + self, + **kwargs + ): + super(FileServiceItems, self).__init__(**kwargs) + self.value = None + + +class FileServiceProperties(Resource): + """The properties of File services in storage account. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar sku: Sku name and tier. + :vartype sku: ~azure.mgmt.storage.v2021_06_01.models.Sku + :param cors: Specifies CORS rules for the File service. You can include up to five CorsRule + elements in the request. If no CorsRule elements are included in the request body, all CORS + rules will be deleted, and CORS will be disabled for the File service. + :type cors: ~azure.mgmt.storage.v2021_06_01.models.CorsRules + :param share_delete_retention_policy: The file service properties for share soft delete. + :type share_delete_retention_policy: + ~azure.mgmt.storage.v2021_06_01.models.DeleteRetentionPolicy + :param protocol_settings: Protocol settings for file service. + :type protocol_settings: ~azure.mgmt.storage.v2021_06_01.models.ProtocolSettings + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'sku': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'cors': {'key': 'properties.cors', 'type': 'CorsRules'}, + 'share_delete_retention_policy': {'key': 'properties.shareDeleteRetentionPolicy', 'type': 'DeleteRetentionPolicy'}, + 'protocol_settings': {'key': 'properties.protocolSettings', 'type': 'ProtocolSettings'}, + } + + def __init__( + self, + **kwargs + ): + super(FileServiceProperties, self).__init__(**kwargs) + self.sku = None + self.cors = kwargs.get('cors', None) + self.share_delete_retention_policy = kwargs.get('share_delete_retention_policy', None) + self.protocol_settings = kwargs.get('protocol_settings', None) + + +class FileShare(AzureEntityResource): + """Properties of the file share, including Id, resource name, resource type, Etag. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. 
Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :ivar last_modified_time: Returns the date and time the share was last modified. + :vartype last_modified_time: ~datetime.datetime + :param metadata: A name-value pair to associate with the share as metadata. + :type metadata: dict[str, str] + :param share_quota: The maximum size of the share, in gigabytes. Must be greater than 0, and + less than or equal to 5TB (5120). For Large File Shares, the maximum size is 102400. + :type share_quota: int + :param enabled_protocols: The authentication protocol that is used for the file share. Can only + be specified when creating a share. Possible values include: "SMB", "NFS". + :type enabled_protocols: str or ~azure.mgmt.storage.v2021_06_01.models.EnabledProtocols + :param root_squash: The property is for NFS share only. The default is NoRootSquash. Possible + values include: "NoRootSquash", "RootSquash", "AllSquash". + :type root_squash: str or ~azure.mgmt.storage.v2021_06_01.models.RootSquashType + :ivar version: The version of the share. + :vartype version: str + :ivar deleted: Indicates whether the share was deleted. + :vartype deleted: bool + :ivar deleted_time: The deleted time if the share was deleted. + :vartype deleted_time: ~datetime.datetime + :ivar remaining_retention_days: Remaining retention days for share that was soft deleted. + :vartype remaining_retention_days: int + :param access_tier: Access tier for specific share. GpV2 account can choose between + TransactionOptimized (default), Hot, and Cool. FileStorage account can choose Premium. Possible + values include: "TransactionOptimized", "Hot", "Cool", "Premium". + :type access_tier: str or ~azure.mgmt.storage.v2021_06_01.models.ShareAccessTier + :ivar access_tier_change_time: Indicates the last modification time for share access tier. + :vartype access_tier_change_time: ~datetime.datetime + :ivar access_tier_status: Indicates if there is a pending transition for access tier. + :vartype access_tier_status: str + :ivar share_usage_bytes: The approximate size of the data stored on the share. Note that this + value may not include all recently created or recently resized files. + :vartype share_usage_bytes: long + :ivar lease_status: The lease status of the share. Possible values include: "Locked", + "Unlocked". + :vartype lease_status: str or ~azure.mgmt.storage.v2021_06_01.models.LeaseStatus + :ivar lease_state: Lease state of the share. Possible values include: "Available", "Leased", + "Expired", "Breaking", "Broken". + :vartype lease_state: str or ~azure.mgmt.storage.v2021_06_01.models.LeaseState + :ivar lease_duration: Specifies whether the lease on a share is of infinite or fixed duration, + only when the share is leased. Possible values include: "Infinite", "Fixed". + :vartype lease_duration: str or ~azure.mgmt.storage.v2021_06_01.models.LeaseDuration + :param signed_identifiers: List of stored access policies specified on the share. + :type signed_identifiers: list[~azure.mgmt.storage.v2021_06_01.models.SignedIdentifier] + :ivar snapshot_time: Creation time of share snapshot returned in the response of list shares + with expand param "snapshots". 
+ :vartype snapshot_time: ~datetime.datetime + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'last_modified_time': {'readonly': True}, + 'share_quota': {'maximum': 102400, 'minimum': 1}, + 'version': {'readonly': True}, + 'deleted': {'readonly': True}, + 'deleted_time': {'readonly': True}, + 'remaining_retention_days': {'readonly': True}, + 'access_tier_change_time': {'readonly': True}, + 'access_tier_status': {'readonly': True}, + 'share_usage_bytes': {'readonly': True}, + 'lease_status': {'readonly': True}, + 'lease_state': {'readonly': True}, + 'lease_duration': {'readonly': True}, + 'snapshot_time': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'iso-8601'}, + 'metadata': {'key': 'properties.metadata', 'type': '{str}'}, + 'share_quota': {'key': 'properties.shareQuota', 'type': 'int'}, + 'enabled_protocols': {'key': 'properties.enabledProtocols', 'type': 'str'}, + 'root_squash': {'key': 'properties.rootSquash', 'type': 'str'}, + 'version': {'key': 'properties.version', 'type': 'str'}, + 'deleted': {'key': 'properties.deleted', 'type': 'bool'}, + 'deleted_time': {'key': 'properties.deletedTime', 'type': 'iso-8601'}, + 'remaining_retention_days': {'key': 'properties.remainingRetentionDays', 'type': 'int'}, + 'access_tier': {'key': 'properties.accessTier', 'type': 'str'}, + 'access_tier_change_time': {'key': 'properties.accessTierChangeTime', 'type': 'iso-8601'}, + 'access_tier_status': {'key': 'properties.accessTierStatus', 'type': 'str'}, + 'share_usage_bytes': {'key': 'properties.shareUsageBytes', 'type': 'long'}, + 'lease_status': {'key': 'properties.leaseStatus', 'type': 'str'}, + 'lease_state': {'key': 'properties.leaseState', 'type': 'str'}, + 'lease_duration': {'key': 'properties.leaseDuration', 'type': 'str'}, + 'signed_identifiers': {'key': 'properties.signedIdentifiers', 'type': '[SignedIdentifier]'}, + 'snapshot_time': {'key': 'properties.snapshotTime', 'type': 'iso-8601'}, + } + + def __init__( + self, + **kwargs + ): + super(FileShare, self).__init__(**kwargs) + self.last_modified_time = None + self.metadata = kwargs.get('metadata', None) + self.share_quota = kwargs.get('share_quota', None) + self.enabled_protocols = kwargs.get('enabled_protocols', None) + self.root_squash = kwargs.get('root_squash', None) + self.version = None + self.deleted = None + self.deleted_time = None + self.remaining_retention_days = None + self.access_tier = kwargs.get('access_tier', None) + self.access_tier_change_time = None + self.access_tier_status = None + self.share_usage_bytes = None + self.lease_status = None + self.lease_state = None + self.lease_duration = None + self.signed_identifiers = kwargs.get('signed_identifiers', None) + self.snapshot_time = None + + +class FileShareItem(AzureEntityResource): + """The file share properties be listed out. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. 
"Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :ivar last_modified_time: Returns the date and time the share was last modified. + :vartype last_modified_time: ~datetime.datetime + :param metadata: A name-value pair to associate with the share as metadata. + :type metadata: dict[str, str] + :param share_quota: The maximum size of the share, in gigabytes. Must be greater than 0, and + less than or equal to 5TB (5120). For Large File Shares, the maximum size is 102400. + :type share_quota: int + :param enabled_protocols: The authentication protocol that is used for the file share. Can only + be specified when creating a share. Possible values include: "SMB", "NFS". + :type enabled_protocols: str or ~azure.mgmt.storage.v2021_06_01.models.EnabledProtocols + :param root_squash: The property is for NFS share only. The default is NoRootSquash. Possible + values include: "NoRootSquash", "RootSquash", "AllSquash". + :type root_squash: str or ~azure.mgmt.storage.v2021_06_01.models.RootSquashType + :ivar version: The version of the share. + :vartype version: str + :ivar deleted: Indicates whether the share was deleted. + :vartype deleted: bool + :ivar deleted_time: The deleted time if the share was deleted. + :vartype deleted_time: ~datetime.datetime + :ivar remaining_retention_days: Remaining retention days for share that was soft deleted. + :vartype remaining_retention_days: int + :param access_tier: Access tier for specific share. GpV2 account can choose between + TransactionOptimized (default), Hot, and Cool. FileStorage account can choose Premium. Possible + values include: "TransactionOptimized", "Hot", "Cool", "Premium". + :type access_tier: str or ~azure.mgmt.storage.v2021_06_01.models.ShareAccessTier + :ivar access_tier_change_time: Indicates the last modification time for share access tier. + :vartype access_tier_change_time: ~datetime.datetime + :ivar access_tier_status: Indicates if there is a pending transition for access tier. + :vartype access_tier_status: str + :ivar share_usage_bytes: The approximate size of the data stored on the share. Note that this + value may not include all recently created or recently resized files. + :vartype share_usage_bytes: long + :ivar lease_status: The lease status of the share. Possible values include: "Locked", + "Unlocked". + :vartype lease_status: str or ~azure.mgmt.storage.v2021_06_01.models.LeaseStatus + :ivar lease_state: Lease state of the share. Possible values include: "Available", "Leased", + "Expired", "Breaking", "Broken". + :vartype lease_state: str or ~azure.mgmt.storage.v2021_06_01.models.LeaseState + :ivar lease_duration: Specifies whether the lease on a share is of infinite or fixed duration, + only when the share is leased. Possible values include: "Infinite", "Fixed". + :vartype lease_duration: str or ~azure.mgmt.storage.v2021_06_01.models.LeaseDuration + :param signed_identifiers: List of stored access policies specified on the share. + :type signed_identifiers: list[~azure.mgmt.storage.v2021_06_01.models.SignedIdentifier] + :ivar snapshot_time: Creation time of share snapshot returned in the response of list shares + with expand param "snapshots". 
+ :vartype snapshot_time: ~datetime.datetime + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'last_modified_time': {'readonly': True}, + 'share_quota': {'maximum': 102400, 'minimum': 1}, + 'version': {'readonly': True}, + 'deleted': {'readonly': True}, + 'deleted_time': {'readonly': True}, + 'remaining_retention_days': {'readonly': True}, + 'access_tier_change_time': {'readonly': True}, + 'access_tier_status': {'readonly': True}, + 'share_usage_bytes': {'readonly': True}, + 'lease_status': {'readonly': True}, + 'lease_state': {'readonly': True}, + 'lease_duration': {'readonly': True}, + 'snapshot_time': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'iso-8601'}, + 'metadata': {'key': 'properties.metadata', 'type': '{str}'}, + 'share_quota': {'key': 'properties.shareQuota', 'type': 'int'}, + 'enabled_protocols': {'key': 'properties.enabledProtocols', 'type': 'str'}, + 'root_squash': {'key': 'properties.rootSquash', 'type': 'str'}, + 'version': {'key': 'properties.version', 'type': 'str'}, + 'deleted': {'key': 'properties.deleted', 'type': 'bool'}, + 'deleted_time': {'key': 'properties.deletedTime', 'type': 'iso-8601'}, + 'remaining_retention_days': {'key': 'properties.remainingRetentionDays', 'type': 'int'}, + 'access_tier': {'key': 'properties.accessTier', 'type': 'str'}, + 'access_tier_change_time': {'key': 'properties.accessTierChangeTime', 'type': 'iso-8601'}, + 'access_tier_status': {'key': 'properties.accessTierStatus', 'type': 'str'}, + 'share_usage_bytes': {'key': 'properties.shareUsageBytes', 'type': 'long'}, + 'lease_status': {'key': 'properties.leaseStatus', 'type': 'str'}, + 'lease_state': {'key': 'properties.leaseState', 'type': 'str'}, + 'lease_duration': {'key': 'properties.leaseDuration', 'type': 'str'}, + 'signed_identifiers': {'key': 'properties.signedIdentifiers', 'type': '[SignedIdentifier]'}, + 'snapshot_time': {'key': 'properties.snapshotTime', 'type': 'iso-8601'}, + } + + def __init__( + self, + **kwargs + ): + super(FileShareItem, self).__init__(**kwargs) + self.last_modified_time = None + self.metadata = kwargs.get('metadata', None) + self.share_quota = kwargs.get('share_quota', None) + self.enabled_protocols = kwargs.get('enabled_protocols', None) + self.root_squash = kwargs.get('root_squash', None) + self.version = None + self.deleted = None + self.deleted_time = None + self.remaining_retention_days = None + self.access_tier = kwargs.get('access_tier', None) + self.access_tier_change_time = None + self.access_tier_status = None + self.share_usage_bytes = None + self.lease_status = None + self.lease_state = None + self.lease_duration = None + self.signed_identifiers = kwargs.get('signed_identifiers', None) + self.snapshot_time = None + + +class FileShareItems(msrest.serialization.Model): + """Response schema. Contains list of shares returned, and if paging is requested or required, a URL to next page of shares. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: List of file shares returned. + :vartype value: list[~azure.mgmt.storage.v2021_06_01.models.FileShareItem] + :ivar next_link: Request URL that can be used to query next page of shares. 
Returned when total + number of requested shares exceed maximum page size. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[FileShareItem]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(FileShareItems, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class GeoReplicationStats(msrest.serialization.Model): + """Statistics related to replication for storage account's Blob, Table, Queue and File services. It is only available when geo-redundant replication is enabled for the storage account. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar status: The status of the secondary location. Possible values are: - Live: Indicates that + the secondary location is active and operational. - Bootstrap: Indicates initial + synchronization from the primary location to the secondary location is in progress.This + typically occurs when replication is first enabled. - Unavailable: Indicates that the secondary + location is temporarily unavailable. Possible values include: "Live", "Bootstrap", + "Unavailable". + :vartype status: str or ~azure.mgmt.storage.v2021_06_01.models.GeoReplicationStatus + :ivar last_sync_time: All primary writes preceding this UTC date/time value are guaranteed to + be available for read operations. Primary writes following this point in time may or may not be + available for reads. Element may be default value if value of LastSyncTime is not available, + this can happen if secondary is offline or we are in bootstrap. + :vartype last_sync_time: ~datetime.datetime + :ivar can_failover: A boolean flag which indicates whether or not account failover is supported + for the account. + :vartype can_failover: bool + """ + + _validation = { + 'status': {'readonly': True}, + 'last_sync_time': {'readonly': True}, + 'can_failover': {'readonly': True}, + } + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'last_sync_time': {'key': 'lastSyncTime', 'type': 'iso-8601'}, + 'can_failover': {'key': 'canFailover', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(GeoReplicationStats, self).__init__(**kwargs) + self.status = None + self.last_sync_time = None + self.can_failover = None + + +class Identity(msrest.serialization.Model): + """Identity for the resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar principal_id: The principal ID of resource identity. + :vartype principal_id: str + :ivar tenant_id: The tenant ID of resource. + :vartype tenant_id: str + :param type: Required. The identity type. Possible values include: "None", "SystemAssigned", + "UserAssigned", "SystemAssigned,UserAssigned". + :type type: str or ~azure.mgmt.storage.v2021_06_01.models.IdentityType + :param user_assigned_identities: Gets or sets a list of key value pairs that describe the set + of User Assigned identities that will be used with this storage account. The key is the ARM + resource identifier of the identity. Only 1 User Assigned identity is permitted here. 
+ :type user_assigned_identities: dict[str, + ~azure.mgmt.storage.v2021_06_01.models.UserAssignedIdentity] + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentity}'}, + } + + def __init__( + self, + **kwargs + ): + super(Identity, self).__init__(**kwargs) + self.principal_id = None + self.tenant_id = None + self.type = kwargs['type'] + self.user_assigned_identities = kwargs.get('user_assigned_identities', None) + + +class ImmutabilityPolicy(AzureEntityResource): + """The ImmutabilityPolicy property of a blob container, including Id, resource name, resource type, Etag. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :param immutability_period_since_creation_in_days: The immutability period for the blobs in the + container since the policy creation, in days. + :type immutability_period_since_creation_in_days: int + :ivar state: The ImmutabilityPolicy state of a blob container, possible values include: Locked + and Unlocked. Possible values include: "Locked", "Unlocked". + :vartype state: str or ~azure.mgmt.storage.v2021_06_01.models.ImmutabilityPolicyState + :param allow_protected_append_writes: This property can only be changed for unlocked time-based + retention policies. When enabled, new blocks can be written to an append blob while maintaining + immutability protection and compliance. Only new blocks can be added and any existing blocks + cannot be modified or deleted. This property cannot be changed with ExtendImmutabilityPolicy + API. + :type allow_protected_append_writes: bool + :param allow_protected_append_writes_all: This property can only be changed for unlocked + time-based retention policies. When enabled, new blocks can be written to both 'Append and Bock + Blobs' while maintaining immutability protection and compliance. Only new blocks can be added + and any existing blocks cannot be modified or deleted. This property cannot be changed with + ExtendImmutabilityPolicy API. The 'allowProtectedAppendWrites' and + 'allowProtectedAppendWritesAll' properties are mutually exclusive. 
+ :type allow_protected_append_writes_all: bool + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'state': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'immutability_period_since_creation_in_days': {'key': 'properties.immutabilityPeriodSinceCreationInDays', 'type': 'int'}, + 'state': {'key': 'properties.state', 'type': 'str'}, + 'allow_protected_append_writes': {'key': 'properties.allowProtectedAppendWrites', 'type': 'bool'}, + 'allow_protected_append_writes_all': {'key': 'properties.allowProtectedAppendWritesAll', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(ImmutabilityPolicy, self).__init__(**kwargs) + self.immutability_period_since_creation_in_days = kwargs.get('immutability_period_since_creation_in_days', None) + self.state = None + self.allow_protected_append_writes = kwargs.get('allow_protected_append_writes', None) + self.allow_protected_append_writes_all = kwargs.get('allow_protected_append_writes_all', None) + + +class ImmutabilityPolicyProperties(msrest.serialization.Model): + """The properties of an ImmutabilityPolicy of a blob container. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar etag: ImmutabilityPolicy Etag. + :vartype etag: str + :ivar update_history: The ImmutabilityPolicy update history of the blob container. + :vartype update_history: list[~azure.mgmt.storage.v2021_06_01.models.UpdateHistoryProperty] + :param immutability_period_since_creation_in_days: The immutability period for the blobs in the + container since the policy creation, in days. + :type immutability_period_since_creation_in_days: int + :ivar state: The ImmutabilityPolicy state of a blob container, possible values include: Locked + and Unlocked. Possible values include: "Locked", "Unlocked". + :vartype state: str or ~azure.mgmt.storage.v2021_06_01.models.ImmutabilityPolicyState + :param allow_protected_append_writes: This property can only be changed for unlocked time-based + retention policies. When enabled, new blocks can be written to an append blob while maintaining + immutability protection and compliance. Only new blocks can be added and any existing blocks + cannot be modified or deleted. This property cannot be changed with ExtendImmutabilityPolicy + API. + :type allow_protected_append_writes: bool + :param allow_protected_append_writes_all: This property can only be changed for unlocked + time-based retention policies. When enabled, new blocks can be written to both 'Append and Bock + Blobs' while maintaining immutability protection and compliance. Only new blocks can be added + and any existing blocks cannot be modified or deleted. This property cannot be changed with + ExtendImmutabilityPolicy API. The 'allowProtectedAppendWrites' and + 'allowProtectedAppendWritesAll' properties are mutually exclusive. 
+ :type allow_protected_append_writes_all: bool + """ + + _validation = { + 'etag': {'readonly': True}, + 'update_history': {'readonly': True}, + 'state': {'readonly': True}, + } + + _attribute_map = { + 'etag': {'key': 'etag', 'type': 'str'}, + 'update_history': {'key': 'updateHistory', 'type': '[UpdateHistoryProperty]'}, + 'immutability_period_since_creation_in_days': {'key': 'properties.immutabilityPeriodSinceCreationInDays', 'type': 'int'}, + 'state': {'key': 'properties.state', 'type': 'str'}, + 'allow_protected_append_writes': {'key': 'properties.allowProtectedAppendWrites', 'type': 'bool'}, + 'allow_protected_append_writes_all': {'key': 'properties.allowProtectedAppendWritesAll', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(ImmutabilityPolicyProperties, self).__init__(**kwargs) + self.etag = None + self.update_history = None + self.immutability_period_since_creation_in_days = kwargs.get('immutability_period_since_creation_in_days', None) + self.state = None + self.allow_protected_append_writes = kwargs.get('allow_protected_append_writes', None) + self.allow_protected_append_writes_all = kwargs.get('allow_protected_append_writes_all', None) + + +class ImmutableStorageAccount(msrest.serialization.Model): + """This property enables and defines account-level immutability. Enabling the feature auto-enables Blob Versioning. + + :param enabled: A boolean flag which enables account-level immutability. All the containers + under such an account have object-level immutability enabled by default. + :type enabled: bool + :param immutability_policy: Specifies the default account-level immutability policy which is + inherited and applied to objects that do not possess an explicit immutability policy at the + object level. The object-level immutability policy has higher precedence than the + container-level immutability policy, which has a higher precedence than the account-level + immutability policy. + :type immutability_policy: + ~azure.mgmt.storage.v2021_06_01.models.AccountImmutabilityPolicyProperties + """ + + _attribute_map = { + 'enabled': {'key': 'enabled', 'type': 'bool'}, + 'immutability_policy': {'key': 'immutabilityPolicy', 'type': 'AccountImmutabilityPolicyProperties'}, + } + + def __init__( + self, + **kwargs + ): + super(ImmutableStorageAccount, self).__init__(**kwargs) + self.enabled = kwargs.get('enabled', None) + self.immutability_policy = kwargs.get('immutability_policy', None) + + +class ImmutableStorageWithVersioning(msrest.serialization.Model): + """Object level immutability properties of the container. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param enabled: This is an immutable property, when set to true it enables object level + immutability at the container level. + :type enabled: bool + :ivar time_stamp: Returns the date and time the object level immutability was enabled. + :vartype time_stamp: ~datetime.datetime + :ivar migration_state: This property denotes the container level immutability to object level + immutability migration state. Possible values include: "InProgress", "Completed". 
+ :vartype migration_state: str or ~azure.mgmt.storage.v2021_06_01.models.MigrationState + """ + + _validation = { + 'time_stamp': {'readonly': True}, + 'migration_state': {'readonly': True}, + } + + _attribute_map = { + 'enabled': {'key': 'enabled', 'type': 'bool'}, + 'time_stamp': {'key': 'timeStamp', 'type': 'iso-8601'}, + 'migration_state': {'key': 'migrationState', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ImmutableStorageWithVersioning, self).__init__(**kwargs) + self.enabled = kwargs.get('enabled', None) + self.time_stamp = None + self.migration_state = None + + +class IPRule(msrest.serialization.Model): + """IP rule with specific IP or IP range in CIDR format. + + All required parameters must be populated in order to send to Azure. + + :param ip_address_or_range: Required. Specifies the IP or IP range in CIDR format. Only IPV4 + address is allowed. + :type ip_address_or_range: str + :param action: The action of IP ACL rule. The only acceptable values to pass in are None and + "Allow". The default value is None. + :type action: str + """ + + _validation = { + 'ip_address_or_range': {'required': True}, + } + + _attribute_map = { + 'ip_address_or_range': {'key': 'value', 'type': 'str'}, + 'action': {'key': 'action', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(IPRule, self).__init__(**kwargs) + self.ip_address_or_range = kwargs['ip_address_or_range'] + self.action = kwargs.get('action', None) + + +class KeyCreationTime(msrest.serialization.Model): + """Storage account keys creation time. + + :param key1: + :type key1: ~datetime.datetime + :param key2: + :type key2: ~datetime.datetime + """ + + _attribute_map = { + 'key1': {'key': 'key1', 'type': 'iso-8601'}, + 'key2': {'key': 'key2', 'type': 'iso-8601'}, + } + + def __init__( + self, + **kwargs + ): + super(KeyCreationTime, self).__init__(**kwargs) + self.key1 = kwargs.get('key1', None) + self.key2 = kwargs.get('key2', None) + + +class KeyPolicy(msrest.serialization.Model): + """KeyPolicy assigned to the storage account. + + All required parameters must be populated in order to send to Azure. + + :param key_expiration_period_in_days: Required. The key expiration period in days. + :type key_expiration_period_in_days: int + """ + + _validation = { + 'key_expiration_period_in_days': {'required': True}, + } + + _attribute_map = { + 'key_expiration_period_in_days': {'key': 'keyExpirationPeriodInDays', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(KeyPolicy, self).__init__(**kwargs) + self.key_expiration_period_in_days = kwargs['key_expiration_period_in_days'] + + +class KeyVaultProperties(msrest.serialization.Model): + """Properties of key vault. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param key_name: The name of KeyVault key. + :type key_name: str + :param key_version: The version of KeyVault key. + :type key_version: str + :param key_vault_uri: The Uri of KeyVault. + :type key_vault_uri: str + :ivar current_versioned_key_identifier: The object identifier of the current versioned Key + Vault Key in use. + :vartype current_versioned_key_identifier: str + :ivar last_key_rotation_timestamp: Timestamp of last rotation of the Key Vault Key. 
+ :vartype last_key_rotation_timestamp: ~datetime.datetime
+ """
+
+ _validation = {
+ 'current_versioned_key_identifier': {'readonly': True},
+ 'last_key_rotation_timestamp': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'key_name': {'key': 'keyname', 'type': 'str'},
+ 'key_version': {'key': 'keyversion', 'type': 'str'},
+ 'key_vault_uri': {'key': 'keyvaulturi', 'type': 'str'},
+ 'current_versioned_key_identifier': {'key': 'currentVersionedKeyIdentifier', 'type': 'str'},
+ 'last_key_rotation_timestamp': {'key': 'lastKeyRotationTimestamp', 'type': 'iso-8601'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(KeyVaultProperties, self).__init__(**kwargs)
+ self.key_name = kwargs.get('key_name', None)
+ self.key_version = kwargs.get('key_version', None)
+ self.key_vault_uri = kwargs.get('key_vault_uri', None)
+ self.current_versioned_key_identifier = None
+ self.last_key_rotation_timestamp = None
+
+
+class LastAccessTimeTrackingPolicy(msrest.serialization.Model):
+ """The blob service properties for Last access time based tracking policy.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param enable: Required. When set to true, last access time based tracking is enabled.
+ :type enable: bool
+ :param name: Name of the policy. The valid value is AccessTimeTracking. This field is currently
+ read only. Possible values include: "AccessTimeTracking".
+ :type name: str or ~azure.mgmt.storage.v2021_06_01.models.Name
+ :param tracking_granularity_in_days: The field specifies blob object tracking granularity in
+ days, typically how often the blob object should be tracked. This field is currently read only
+ with a value of 1.
+ :type tracking_granularity_in_days: int
+ :param blob_type: An array of predefined supported blob types. Only blockBlob is the supported
+ value. This field is currently read only.
+ :type blob_type: list[str]
+ """
+
+ _validation = {
+ 'enable': {'required': True},
+ }
+
+ _attribute_map = {
+ 'enable': {'key': 'enable', 'type': 'bool'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'tracking_granularity_in_days': {'key': 'trackingGranularityInDays', 'type': 'int'},
+ 'blob_type': {'key': 'blobType', 'type': '[str]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(LastAccessTimeTrackingPolicy, self).__init__(**kwargs)
+ self.enable = kwargs['enable']
+ self.name = kwargs.get('name', None)
+ self.tracking_granularity_in_days = kwargs.get('tracking_granularity_in_days', None)
+ self.blob_type = kwargs.get('blob_type', None)
+
+
+class LeaseContainerRequest(msrest.serialization.Model):
+ """Lease Container request schema.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param action: Required. Specifies the lease action. Can be one of the available actions.
+ Possible values include: "Acquire", "Renew", "Change", "Release", "Break".
+ :type action: str or ~azure.mgmt.storage.v2021_06_01.models.LeaseContainerRequestAction
+ :param lease_id: Identifies the lease. Can be specified in any valid GUID string format.
+ :type lease_id: str
+ :param break_period: Optional. For a break action, proposed duration the lease should continue
+ before it is broken, in seconds, between 0 and 60.
+ :type break_period: int
+ :param lease_duration: Required for acquire. Specifies the duration of the lease, in seconds,
+ or negative one (-1) for a lease that never expires.
+ :type lease_duration: int
+ :param proposed_lease_id: Optional for acquire, required for change. Proposed lease ID, in a
+ GUID string format.
+ :type proposed_lease_id: str + """ + + _validation = { + 'action': {'required': True}, + } + + _attribute_map = { + 'action': {'key': 'action', 'type': 'str'}, + 'lease_id': {'key': 'leaseId', 'type': 'str'}, + 'break_period': {'key': 'breakPeriod', 'type': 'int'}, + 'lease_duration': {'key': 'leaseDuration', 'type': 'int'}, + 'proposed_lease_id': {'key': 'proposedLeaseId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(LeaseContainerRequest, self).__init__(**kwargs) + self.action = kwargs['action'] + self.lease_id = kwargs.get('lease_id', None) + self.break_period = kwargs.get('break_period', None) + self.lease_duration = kwargs.get('lease_duration', None) + self.proposed_lease_id = kwargs.get('proposed_lease_id', None) + + +class LeaseContainerResponse(msrest.serialization.Model): + """Lease Container response schema. + + :param lease_id: Returned unique lease ID that must be included with any request to delete the + container, or to renew, change, or release the lease. + :type lease_id: str + :param lease_time_seconds: Approximate time remaining in the lease period, in seconds. + :type lease_time_seconds: str + """ + + _attribute_map = { + 'lease_id': {'key': 'leaseId', 'type': 'str'}, + 'lease_time_seconds': {'key': 'leaseTimeSeconds', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(LeaseContainerResponse, self).__init__(**kwargs) + self.lease_id = kwargs.get('lease_id', None) + self.lease_time_seconds = kwargs.get('lease_time_seconds', None) + + +class LeaseShareRequest(msrest.serialization.Model): + """Lease Share request schema. + + All required parameters must be populated in order to send to Azure. + + :param action: Required. Specifies the lease action. Can be one of the available actions. + Possible values include: "Acquire", "Renew", "Change", "Release", "Break". + :type action: str or ~azure.mgmt.storage.v2021_06_01.models.LeaseShareAction + :param lease_id: Identifies the lease. Can be specified in any valid GUID string format. + :type lease_id: str + :param break_period: Optional. For a break action, proposed duration the lease should continue + before it is broken, in seconds, between 0 and 60. + :type break_period: int + :param lease_duration: Required for acquire. Specifies the duration of the lease, in seconds, + or negative one (-1) for a lease that never expires. + :type lease_duration: int + :param proposed_lease_id: Optional for acquire, required for change. Proposed lease ID, in a + GUID string format. + :type proposed_lease_id: str + """ + + _validation = { + 'action': {'required': True}, + } + + _attribute_map = { + 'action': {'key': 'action', 'type': 'str'}, + 'lease_id': {'key': 'leaseId', 'type': 'str'}, + 'break_period': {'key': 'breakPeriod', 'type': 'int'}, + 'lease_duration': {'key': 'leaseDuration', 'type': 'int'}, + 'proposed_lease_id': {'key': 'proposedLeaseId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(LeaseShareRequest, self).__init__(**kwargs) + self.action = kwargs['action'] + self.lease_id = kwargs.get('lease_id', None) + self.break_period = kwargs.get('break_period', None) + self.lease_duration = kwargs.get('lease_duration', None) + self.proposed_lease_id = kwargs.get('proposed_lease_id', None) + + +class LeaseShareResponse(msrest.serialization.Model): + """Lease Share response schema. + + :param lease_id: Returned unique lease ID that must be included with any request to delete the + share, or to renew, change, or release the lease. 
+ :type lease_id: str
+ :param lease_time_seconds: Approximate time remaining in the lease period, in seconds.
+ :type lease_time_seconds: str
+ """
+
+ _attribute_map = {
+ 'lease_id': {'key': 'leaseId', 'type': 'str'},
+ 'lease_time_seconds': {'key': 'leaseTimeSeconds', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(LeaseShareResponse, self).__init__(**kwargs)
+ self.lease_id = kwargs.get('lease_id', None)
+ self.lease_time_seconds = kwargs.get('lease_time_seconds', None)
+
+
+class LegalHold(msrest.serialization.Model):
+ """The LegalHold property of a blob container.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar has_legal_hold: The hasLegalHold public property is set to true by SRP if there is at
+ least one existing tag. The hasLegalHold public property is set to false by SRP if all existing
+ legal hold tags are cleared out. There can be a maximum of 1000 blob containers with
+ hasLegalHold=true for a given account.
+ :vartype has_legal_hold: bool
+ :param tags: Required. A set of tags. Each tag should be 3 to 23 alphanumeric characters and is
+ normalized to lower case at SRP.
+ :type tags: list[str]
+ :param allow_protected_append_writes_all: When enabled, new blocks can be written to both
+ 'Append and Block Blobs' while maintaining legal hold protection and compliance. Only new blocks
+ can be added and any existing blocks cannot be modified or deleted.
+ :type allow_protected_append_writes_all: bool
+ """
+
+ _validation = {
+ 'has_legal_hold': {'readonly': True},
+ 'tags': {'required': True},
+ }
+
+ _attribute_map = {
+ 'has_legal_hold': {'key': 'hasLegalHold', 'type': 'bool'},
+ 'tags': {'key': 'tags', 'type': '[str]'},
+ 'allow_protected_append_writes_all': {'key': 'allowProtectedAppendWritesAll', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(LegalHold, self).__init__(**kwargs)
+ self.has_legal_hold = None
+ self.tags = kwargs['tags']
+ self.allow_protected_append_writes_all = kwargs.get('allow_protected_append_writes_all', None)
+
+
+class LegalHoldProperties(msrest.serialization.Model):
+ """The LegalHold property of a blob container.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar has_legal_hold: The hasLegalHold public property is set to true by SRP if there is at
+ least one existing tag. The hasLegalHold public property is set to false by SRP if all existing
+ legal hold tags are cleared out. There can be a maximum of 1000 blob containers with
+ hasLegalHold=true for a given account.
+ :vartype has_legal_hold: bool
+ :param tags: A set of tags. The list of LegalHold tags of a blob container.
+ :type tags: list[~azure.mgmt.storage.v2021_06_01.models.TagProperty]
+ :param protected_append_writes_history: Protected append blob writes history.
+ :type protected_append_writes_history:
+ ~azure.mgmt.storage.v2021_06_01.models.ProtectedAppendWritesHistory
+ """
+
+ _validation = {
+ 'has_legal_hold': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'has_legal_hold': {'key': 'hasLegalHold', 'type': 'bool'},
+ 'tags': {'key': 'tags', 'type': '[TagProperty]'},
+ 'protected_append_writes_history': {'key': 'protectedAppendWritesHistory', 'type': 'ProtectedAppendWritesHistory'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(LegalHoldProperties, self).__init__(**kwargs)
+ self.has_legal_hold = None
+ self.tags = kwargs.get('tags', None)
+ self.protected_append_writes_history = kwargs.get('protected_append_writes_history', None)
+
+
+class ListAccountSasResponse(msrest.serialization.Model):
+ """The List SAS credentials operation response.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar account_sas_token: List SAS credentials of storage account.
+ :vartype account_sas_token: str
+ """
+
+ _validation = {
+ 'account_sas_token': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'account_sas_token': {'key': 'accountSasToken', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListAccountSasResponse, self).__init__(**kwargs)
+ self.account_sas_token = None
+
+
+class ListBlobInventoryPolicy(msrest.serialization.Model):
+ """List of blob inventory policies returned.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: List of blob inventory policies.
+ :vartype value: list[~azure.mgmt.storage.v2021_06_01.models.BlobInventoryPolicy]
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[BlobInventoryPolicy]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListBlobInventoryPolicy, self).__init__(**kwargs)
+ self.value = None
+
+
+class ListContainerItem(AzureEntityResource):
+ """The blob container properties to be listed out.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Fully qualified resource ID for the resource. Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ :ivar etag: Resource Etag.
+ :vartype etag: str
+ :ivar version: The version of the deleted blob container.
+ :vartype version: str
+ :ivar deleted: Indicates whether the blob container was deleted.
+ :vartype deleted: bool
+ :ivar deleted_time: Blob container deletion time.
+ :vartype deleted_time: ~datetime.datetime
+ :ivar remaining_retention_days: Remaining retention days for soft deleted blob container.
+ :vartype remaining_retention_days: int
+ :param default_encryption_scope: Default the container to use specified encryption scope for
+ all writes.
+ :type default_encryption_scope: str
+ :param deny_encryption_scope_override: Block override of encryption scope from the container
+ default.
+ :type deny_encryption_scope_override: bool
+ :param public_access: Specifies whether data in the container may be accessed publicly and the
+ level of access. Possible values include: "Container", "Blob", "None".
+ :type public_access: str or ~azure.mgmt.storage.v2021_06_01.models.PublicAccess
+ :ivar last_modified_time: Returns the date and time the container was last modified.
+ :vartype last_modified_time: ~datetime.datetime
+ :ivar lease_status: The lease status of the container. Possible values include: "Locked",
+ "Unlocked".
+ :vartype lease_status: str or ~azure.mgmt.storage.v2021_06_01.models.LeaseStatus
+ :ivar lease_state: Lease state of the container. Possible values include: "Available",
+ "Leased", "Expired", "Breaking", "Broken".
+ :vartype lease_state: str or ~azure.mgmt.storage.v2021_06_01.models.LeaseState
+ :ivar lease_duration: Specifies whether the lease on a container is of infinite or fixed
+ duration, only when the container is leased. Possible values include: "Infinite", "Fixed".
+ :vartype lease_duration: str or ~azure.mgmt.storage.v2021_06_01.models.LeaseDuration
+ :param metadata: A name-value pair to associate with the container as metadata.
+ :type metadata: dict[str, str]
+ :ivar immutability_policy: The ImmutabilityPolicy property of the container.
+ :vartype immutability_policy:
+ ~azure.mgmt.storage.v2021_06_01.models.ImmutabilityPolicyProperties
+ :ivar legal_hold: The LegalHold property of the container.
+ :vartype legal_hold: ~azure.mgmt.storage.v2021_06_01.models.LegalHoldProperties
+ :ivar has_legal_hold: The hasLegalHold public property is set to true by SRP if there is at
+ least one existing tag. The hasLegalHold public property is set to false by SRP if all existing
+ legal hold tags are cleared out. There can be a maximum of 1000 blob containers with
+ hasLegalHold=true for a given account.
+ :vartype has_legal_hold: bool
+ :ivar has_immutability_policy: The hasImmutabilityPolicy public property is set to true by SRP
+ if ImmutabilityPolicy has been created for this container. The hasImmutabilityPolicy public
+ property is set to false by SRP if ImmutabilityPolicy has not been created for this container.
+ :vartype has_immutability_policy: bool
+ :param immutable_storage_with_versioning: The object level immutability property of the
+ container. The property is immutable and can only be set to true at the container creation
+ time. Existing containers must undergo a migration process.
+ :type immutable_storage_with_versioning:
+ ~azure.mgmt.storage.v2021_06_01.models.ImmutableStorageWithVersioning
+ :param enable_nfs_v3_root_squash: Enable NFSv3 root squash on blob container.
+ :type enable_nfs_v3_root_squash: bool
+ :param enable_nfs_v3_all_squash: Enable NFSv3 all squash on blob container.
+ :type enable_nfs_v3_all_squash: bool + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'version': {'readonly': True}, + 'deleted': {'readonly': True}, + 'deleted_time': {'readonly': True}, + 'remaining_retention_days': {'readonly': True}, + 'last_modified_time': {'readonly': True}, + 'lease_status': {'readonly': True}, + 'lease_state': {'readonly': True}, + 'lease_duration': {'readonly': True}, + 'immutability_policy': {'readonly': True}, + 'legal_hold': {'readonly': True}, + 'has_legal_hold': {'readonly': True}, + 'has_immutability_policy': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'version': {'key': 'properties.version', 'type': 'str'}, + 'deleted': {'key': 'properties.deleted', 'type': 'bool'}, + 'deleted_time': {'key': 'properties.deletedTime', 'type': 'iso-8601'}, + 'remaining_retention_days': {'key': 'properties.remainingRetentionDays', 'type': 'int'}, + 'default_encryption_scope': {'key': 'properties.defaultEncryptionScope', 'type': 'str'}, + 'deny_encryption_scope_override': {'key': 'properties.denyEncryptionScopeOverride', 'type': 'bool'}, + 'public_access': {'key': 'properties.publicAccess', 'type': 'str'}, + 'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'iso-8601'}, + 'lease_status': {'key': 'properties.leaseStatus', 'type': 'str'}, + 'lease_state': {'key': 'properties.leaseState', 'type': 'str'}, + 'lease_duration': {'key': 'properties.leaseDuration', 'type': 'str'}, + 'metadata': {'key': 'properties.metadata', 'type': '{str}'}, + 'immutability_policy': {'key': 'properties.immutabilityPolicy', 'type': 'ImmutabilityPolicyProperties'}, + 'legal_hold': {'key': 'properties.legalHold', 'type': 'LegalHoldProperties'}, + 'has_legal_hold': {'key': 'properties.hasLegalHold', 'type': 'bool'}, + 'has_immutability_policy': {'key': 'properties.hasImmutabilityPolicy', 'type': 'bool'}, + 'immutable_storage_with_versioning': {'key': 'properties.immutableStorageWithVersioning', 'type': 'ImmutableStorageWithVersioning'}, + 'enable_nfs_v3_root_squash': {'key': 'properties.enableNfsV3RootSquash', 'type': 'bool'}, + 'enable_nfs_v3_all_squash': {'key': 'properties.enableNfsV3AllSquash', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(ListContainerItem, self).__init__(**kwargs) + self.version = None + self.deleted = None + self.deleted_time = None + self.remaining_retention_days = None + self.default_encryption_scope = kwargs.get('default_encryption_scope', None) + self.deny_encryption_scope_override = kwargs.get('deny_encryption_scope_override', None) + self.public_access = kwargs.get('public_access', None) + self.last_modified_time = None + self.lease_status = None + self.lease_state = None + self.lease_duration = None + self.metadata = kwargs.get('metadata', None) + self.immutability_policy = None + self.legal_hold = None + self.has_legal_hold = None + self.has_immutability_policy = None + self.immutable_storage_with_versioning = kwargs.get('immutable_storage_with_versioning', None) + self.enable_nfs_v3_root_squash = kwargs.get('enable_nfs_v3_root_squash', None) + self.enable_nfs_v3_all_squash = kwargs.get('enable_nfs_v3_all_squash', None) + + +class ListContainerItems(msrest.serialization.Model): + """Response schema. 
Contains list of blob containers returned, and if paging is requested or required, a URL to next page of containers.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: List of blob containers returned.
+ :vartype value: list[~azure.mgmt.storage.v2021_06_01.models.ListContainerItem]
+ :ivar next_link: Request URL that can be used to query next page of containers. Returned when
+ total number of requested containers exceeds maximum page size.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[ListContainerItem]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListContainerItems, self).__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class ListQueue(Resource):
+ """ListQueue.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Fully qualified resource ID for the resource. Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ :param metadata: A name-value pair that represents queue metadata.
+ :type metadata: dict[str, str]
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'metadata': {'key': 'properties.metadata', 'type': '{str}'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListQueue, self).__init__(**kwargs)
+ self.metadata = kwargs.get('metadata', None)
+
+
+class ListQueueResource(msrest.serialization.Model):
+ """Response schema. Contains list of queues returned.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: List of queues returned.
+ :vartype value: list[~azure.mgmt.storage.v2021_06_01.models.ListQueue]
+ :ivar next_link: Request URL that can be used to list next page of queues.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[ListQueue]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListQueueResource, self).__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class ListQueueServices(msrest.serialization.Model):
+ """ListQueueServices.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: List of queue services returned.
+ :vartype value: list[~azure.mgmt.storage.v2021_06_01.models.QueueServiceProperties]
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[QueueServiceProperties]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListQueueServices, self).__init__(**kwargs)
+ self.value = None
+
+
+class ListServiceSasResponse(msrest.serialization.Model):
+ """The List service SAS credentials operation response.
+ + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar service_sas_token: List service SAS credentials of specific resource. + :vartype service_sas_token: str + """ + + _validation = { + 'service_sas_token': {'readonly': True}, + } + + _attribute_map = { + 'service_sas_token': {'key': 'serviceSasToken', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ListServiceSasResponse, self).__init__(**kwargs) + self.service_sas_token = None + + +class ListTableResource(msrest.serialization.Model): + """Response schema. Contains list of tables returned. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: List of tables returned. + :vartype value: list[~azure.mgmt.storage.v2021_06_01.models.Table] + :ivar next_link: Request URL that can be used to query next page of tables. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Table]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ListTableResource, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class ListTableServices(msrest.serialization.Model): + """ListTableServices. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: List of table services returned. + :vartype value: list[~azure.mgmt.storage.v2021_06_01.models.TableServiceProperties] + """ + + _validation = { + 'value': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[TableServiceProperties]'}, + } + + def __init__( + self, + **kwargs + ): + super(ListTableServices, self).__init__(**kwargs) + self.value = None + + +class ManagementPolicy(Resource): + """The Get Storage Account ManagementPolicies operation response. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar last_modified_time: Returns the date and time the ManagementPolicies was last modified. + :vartype last_modified_time: ~datetime.datetime + :param policy: The Storage Account ManagementPolicy, in JSON format. See more details in: + https://docs.microsoft.com/en-us/azure/storage/common/storage-lifecycle-managment-concepts. 
+ :type policy: ~azure.mgmt.storage.v2021_06_01.models.ManagementPolicySchema + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'last_modified_time': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'iso-8601'}, + 'policy': {'key': 'properties.policy', 'type': 'ManagementPolicySchema'}, + } + + def __init__( + self, + **kwargs + ): + super(ManagementPolicy, self).__init__(**kwargs) + self.last_modified_time = None + self.policy = kwargs.get('policy', None) + + +class ManagementPolicyAction(msrest.serialization.Model): + """Actions are applied to the filtered blobs when the execution condition is met. + + :param base_blob: The management policy action for base blob. + :type base_blob: ~azure.mgmt.storage.v2021_06_01.models.ManagementPolicyBaseBlob + :param snapshot: The management policy action for snapshot. + :type snapshot: ~azure.mgmt.storage.v2021_06_01.models.ManagementPolicySnapShot + :param version: The management policy action for version. + :type version: ~azure.mgmt.storage.v2021_06_01.models.ManagementPolicyVersion + """ + + _attribute_map = { + 'base_blob': {'key': 'baseBlob', 'type': 'ManagementPolicyBaseBlob'}, + 'snapshot': {'key': 'snapshot', 'type': 'ManagementPolicySnapShot'}, + 'version': {'key': 'version', 'type': 'ManagementPolicyVersion'}, + } + + def __init__( + self, + **kwargs + ): + super(ManagementPolicyAction, self).__init__(**kwargs) + self.base_blob = kwargs.get('base_blob', None) + self.snapshot = kwargs.get('snapshot', None) + self.version = kwargs.get('version', None) + + +class ManagementPolicyBaseBlob(msrest.serialization.Model): + """Management policy action for base blob. + + :param tier_to_cool: The function to tier blobs to cool storage. Support blobs currently at Hot + tier. + :type tier_to_cool: ~azure.mgmt.storage.v2021_06_01.models.DateAfterModification + :param tier_to_archive: The function to tier blobs to archive storage. Support blobs currently + at Hot or Cool tier. + :type tier_to_archive: ~azure.mgmt.storage.v2021_06_01.models.DateAfterModification + :param delete: The function to delete the blob. + :type delete: ~azure.mgmt.storage.v2021_06_01.models.DateAfterModification + :param enable_auto_tier_to_hot_from_cool: This property enables auto tiering of a blob from + cool to hot on a blob access. This property requires + tierToCool.daysAfterLastAccessTimeGreaterThan. + :type enable_auto_tier_to_hot_from_cool: bool + """ + + _attribute_map = { + 'tier_to_cool': {'key': 'tierToCool', 'type': 'DateAfterModification'}, + 'tier_to_archive': {'key': 'tierToArchive', 'type': 'DateAfterModification'}, + 'delete': {'key': 'delete', 'type': 'DateAfterModification'}, + 'enable_auto_tier_to_hot_from_cool': {'key': 'enableAutoTierToHotFromCool', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(ManagementPolicyBaseBlob, self).__init__(**kwargs) + self.tier_to_cool = kwargs.get('tier_to_cool', None) + self.tier_to_archive = kwargs.get('tier_to_archive', None) + self.delete = kwargs.get('delete', None) + self.enable_auto_tier_to_hot_from_cool = kwargs.get('enable_auto_tier_to_hot_from_cool', None) + + +class ManagementPolicyDefinition(msrest.serialization.Model): + """An object that defines the Lifecycle rule. Each definition is made up with a filters set and an actions set. 
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param actions: Required. An object that defines the action set.
+ :type actions: ~azure.mgmt.storage.v2021_06_01.models.ManagementPolicyAction
+ :param filters: An object that defines the filter set.
+ :type filters: ~azure.mgmt.storage.v2021_06_01.models.ManagementPolicyFilter
+ """
+
+ _validation = {
+ 'actions': {'required': True},
+ }
+
+ _attribute_map = {
+ 'actions': {'key': 'actions', 'type': 'ManagementPolicyAction'},
+ 'filters': {'key': 'filters', 'type': 'ManagementPolicyFilter'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ManagementPolicyDefinition, self).__init__(**kwargs)
+ self.actions = kwargs['actions']
+ self.filters = kwargs.get('filters', None)
+
+
+class ManagementPolicyFilter(msrest.serialization.Model):
+ """Filters limit rule actions to a subset of blobs within the storage account. If multiple filters are defined, a logical AND is performed on all filters.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param prefix_match: An array of strings for prefixes to be matched.
+ :type prefix_match: list[str]
+ :param blob_types: Required. An array of predefined enum values. Currently blockBlob supports
+ all tiering and delete actions. Only delete actions are supported for appendBlob.
+ :type blob_types: list[str]
+ :param blob_index_match: An array of blob index tag based filters; there can be at most 10 tag
+ filters.
+ :type blob_index_match: list[~azure.mgmt.storage.v2021_06_01.models.TagFilter]
+ """
+
+ _validation = {
+ 'blob_types': {'required': True},
+ }
+
+ _attribute_map = {
+ 'prefix_match': {'key': 'prefixMatch', 'type': '[str]'},
+ 'blob_types': {'key': 'blobTypes', 'type': '[str]'},
+ 'blob_index_match': {'key': 'blobIndexMatch', 'type': '[TagFilter]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ManagementPolicyFilter, self).__init__(**kwargs)
+ self.prefix_match = kwargs.get('prefix_match', None)
+ self.blob_types = kwargs['blob_types']
+ self.blob_index_match = kwargs.get('blob_index_match', None)
+
+
+class ManagementPolicyRule(msrest.serialization.Model):
+ """An object that wraps the Lifecycle rule. Each rule is uniquely defined by name.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param enabled: Rule is enabled if set to true.
+ :type enabled: bool
+ :param name: Required. A rule name can contain any combination of alphanumeric characters.
+ Rule name is case-sensitive. It must be unique within a policy.
+ :type name: str
+ :param type: Required. The valid value is Lifecycle. Possible values include: "Lifecycle".
+ :type type: str or ~azure.mgmt.storage.v2021_06_01.models.RuleType
+ :param definition: Required. An object that defines the Lifecycle rule.
+ :type definition: ~azure.mgmt.storage.v2021_06_01.models.ManagementPolicyDefinition + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'definition': {'required': True}, + } + + _attribute_map = { + 'enabled': {'key': 'enabled', 'type': 'bool'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'definition': {'key': 'definition', 'type': 'ManagementPolicyDefinition'}, + } + + def __init__( + self, + **kwargs + ): + super(ManagementPolicyRule, self).__init__(**kwargs) + self.enabled = kwargs.get('enabled', None) + self.name = kwargs['name'] + self.type = kwargs['type'] + self.definition = kwargs['definition'] + + +class ManagementPolicySchema(msrest.serialization.Model): + """The Storage Account ManagementPolicies Rules. See more details in: https://docs.microsoft.com/en-us/azure/storage/common/storage-lifecycle-managment-concepts. + + All required parameters must be populated in order to send to Azure. + + :param rules: Required. The Storage Account ManagementPolicies Rules. See more details in: + https://docs.microsoft.com/en-us/azure/storage/common/storage-lifecycle-managment-concepts. + :type rules: list[~azure.mgmt.storage.v2021_06_01.models.ManagementPolicyRule] + """ + + _validation = { + 'rules': {'required': True}, + } + + _attribute_map = { + 'rules': {'key': 'rules', 'type': '[ManagementPolicyRule]'}, + } + + def __init__( + self, + **kwargs + ): + super(ManagementPolicySchema, self).__init__(**kwargs) + self.rules = kwargs['rules'] + + +class ManagementPolicySnapShot(msrest.serialization.Model): + """Management policy action for snapshot. + + :param tier_to_cool: The function to tier blob snapshot to cool storage. Support blob snapshot + currently at Hot tier. + :type tier_to_cool: ~azure.mgmt.storage.v2021_06_01.models.DateAfterCreation + :param tier_to_archive: The function to tier blob snapshot to archive storage. Support blob + snapshot currently at Hot or Cool tier. + :type tier_to_archive: ~azure.mgmt.storage.v2021_06_01.models.DateAfterCreation + :param delete: The function to delete the blob snapshot. + :type delete: ~azure.mgmt.storage.v2021_06_01.models.DateAfterCreation + """ + + _attribute_map = { + 'tier_to_cool': {'key': 'tierToCool', 'type': 'DateAfterCreation'}, + 'tier_to_archive': {'key': 'tierToArchive', 'type': 'DateAfterCreation'}, + 'delete': {'key': 'delete', 'type': 'DateAfterCreation'}, + } + + def __init__( + self, + **kwargs + ): + super(ManagementPolicySnapShot, self).__init__(**kwargs) + self.tier_to_cool = kwargs.get('tier_to_cool', None) + self.tier_to_archive = kwargs.get('tier_to_archive', None) + self.delete = kwargs.get('delete', None) + + +class ManagementPolicyVersion(msrest.serialization.Model): + """Management policy action for blob version. + + :param tier_to_cool: The function to tier blob version to cool storage. Support blob version + currently at Hot tier. + :type tier_to_cool: ~azure.mgmt.storage.v2021_06_01.models.DateAfterCreation + :param tier_to_archive: The function to tier blob version to archive storage. Support blob + version currently at Hot or Cool tier. + :type tier_to_archive: ~azure.mgmt.storage.v2021_06_01.models.DateAfterCreation + :param delete: The function to delete the blob version. 
+ :type delete: ~azure.mgmt.storage.v2021_06_01.models.DateAfterCreation + """ + + _attribute_map = { + 'tier_to_cool': {'key': 'tierToCool', 'type': 'DateAfterCreation'}, + 'tier_to_archive': {'key': 'tierToArchive', 'type': 'DateAfterCreation'}, + 'delete': {'key': 'delete', 'type': 'DateAfterCreation'}, + } + + def __init__( + self, + **kwargs + ): + super(ManagementPolicyVersion, self).__init__(**kwargs) + self.tier_to_cool = kwargs.get('tier_to_cool', None) + self.tier_to_archive = kwargs.get('tier_to_archive', None) + self.delete = kwargs.get('delete', None) + + +class MetricSpecification(msrest.serialization.Model): + """Metric specification of operation. + + :param name: Name of metric specification. + :type name: str + :param display_name: Display name of metric specification. + :type display_name: str + :param display_description: Display description of metric specification. + :type display_description: str + :param unit: Unit could be Bytes or Count. + :type unit: str + :param dimensions: Dimensions of blobs, including blob type and access tier. + :type dimensions: list[~azure.mgmt.storage.v2021_06_01.models.Dimension] + :param aggregation_type: Aggregation type could be Average. + :type aggregation_type: str + :param fill_gap_with_zero: The property to decide fill gap with zero or not. + :type fill_gap_with_zero: bool + :param category: The category this metric specification belong to, could be Capacity. + :type category: str + :param resource_id_dimension_name_override: Account Resource Id. + :type resource_id_dimension_name_override: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'display_description': {'key': 'displayDescription', 'type': 'str'}, + 'unit': {'key': 'unit', 'type': 'str'}, + 'dimensions': {'key': 'dimensions', 'type': '[Dimension]'}, + 'aggregation_type': {'key': 'aggregationType', 'type': 'str'}, + 'fill_gap_with_zero': {'key': 'fillGapWithZero', 'type': 'bool'}, + 'category': {'key': 'category', 'type': 'str'}, + 'resource_id_dimension_name_override': {'key': 'resourceIdDimensionNameOverride', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(MetricSpecification, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.display_name = kwargs.get('display_name', None) + self.display_description = kwargs.get('display_description', None) + self.unit = kwargs.get('unit', None) + self.dimensions = kwargs.get('dimensions', None) + self.aggregation_type = kwargs.get('aggregation_type', None) + self.fill_gap_with_zero = kwargs.get('fill_gap_with_zero', None) + self.category = kwargs.get('category', None) + self.resource_id_dimension_name_override = kwargs.get('resource_id_dimension_name_override', None) + + +class Multichannel(msrest.serialization.Model): + """Multichannel setting. Applies to Premium FileStorage only. + + :param enabled: Indicates whether multichannel is enabled. + :type enabled: bool + """ + + _attribute_map = { + 'enabled': {'key': 'enabled', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(Multichannel, self).__init__(**kwargs) + self.enabled = kwargs.get('enabled', None) + + +class NetworkRuleSet(msrest.serialization.Model): + """Network rule set. + + All required parameters must be populated in order to send to Azure. + + :param bypass: Specifies whether traffic is bypassed for Logging/Metrics/AzureServices. 
+ Possible values are any combination of Logging|Metrics|AzureServices (For example, "Logging, + Metrics"), or None to bypass none of those traffics. Possible values include: "None", + "Logging", "Metrics", "AzureServices". Default value: "AzureServices". + :type bypass: str or ~azure.mgmt.storage.v2021_06_01.models.Bypass + :param resource_access_rules: Sets the resource access rules. + :type resource_access_rules: list[~azure.mgmt.storage.v2021_06_01.models.ResourceAccessRule] + :param virtual_network_rules: Sets the virtual network rules. + :type virtual_network_rules: list[~azure.mgmt.storage.v2021_06_01.models.VirtualNetworkRule] + :param ip_rules: Sets the IP ACL rules. + :type ip_rules: list[~azure.mgmt.storage.v2021_06_01.models.IPRule] + :param default_action: Required. Specifies the default action of allow or deny when no other + rules match. Possible values include: "Allow", "Deny". Default value: "Allow". + :type default_action: str or ~azure.mgmt.storage.v2021_06_01.models.DefaultAction + """ + + _validation = { + 'default_action': {'required': True}, + } + + _attribute_map = { + 'bypass': {'key': 'bypass', 'type': 'str'}, + 'resource_access_rules': {'key': 'resourceAccessRules', 'type': '[ResourceAccessRule]'}, + 'virtual_network_rules': {'key': 'virtualNetworkRules', 'type': '[VirtualNetworkRule]'}, + 'ip_rules': {'key': 'ipRules', 'type': '[IPRule]'}, + 'default_action': {'key': 'defaultAction', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(NetworkRuleSet, self).__init__(**kwargs) + self.bypass = kwargs.get('bypass', "AzureServices") + self.resource_access_rules = kwargs.get('resource_access_rules', None) + self.virtual_network_rules = kwargs.get('virtual_network_rules', None) + self.ip_rules = kwargs.get('ip_rules', None) + self.default_action = kwargs.get('default_action', "Allow") + + +class ObjectReplicationPolicies(msrest.serialization.Model): + """List storage account object replication policies. + + :param value: The replication policy between two storage accounts. + :type value: list[~azure.mgmt.storage.v2021_06_01.models.ObjectReplicationPolicy] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[ObjectReplicationPolicy]'}, + } + + def __init__( + self, + **kwargs + ): + super(ObjectReplicationPolicies, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + + +class ObjectReplicationPolicy(Resource): + """The replication policy between two storage accounts. Multiple rules can be defined in one policy. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar policy_id: A unique id for object replication policy. + :vartype policy_id: str + :ivar enabled_time: Indicates when the policy is enabled on the source account. + :vartype enabled_time: ~datetime.datetime + :param source_account: Required. Source account name. It should be full resource id if + allowCrossTenantReplication set to false. + :type source_account: str + :param destination_account: Required. Destination account name. 
It should be full resource id
+ if allowCrossTenantReplication is set to false.
+ :type destination_account: str
+ :param rules: The storage account object replication rules.
+ :type rules: list[~azure.mgmt.storage.v2021_06_01.models.ObjectReplicationPolicyRule]
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'policy_id': {'readonly': True},
+ 'enabled_time': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'policy_id': {'key': 'properties.policyId', 'type': 'str'},
+ 'enabled_time': {'key': 'properties.enabledTime', 'type': 'iso-8601'},
+ 'source_account': {'key': 'properties.sourceAccount', 'type': 'str'},
+ 'destination_account': {'key': 'properties.destinationAccount', 'type': 'str'},
+ 'rules': {'key': 'properties.rules', 'type': '[ObjectReplicationPolicyRule]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ObjectReplicationPolicy, self).__init__(**kwargs)
+ self.policy_id = None
+ self.enabled_time = None
+ self.source_account = kwargs.get('source_account', None)
+ self.destination_account = kwargs.get('destination_account', None)
+ self.rules = kwargs.get('rules', None)
+
+
+class ObjectReplicationPolicyFilter(msrest.serialization.Model):
+ """Filters limit replication to a subset of blobs within the storage account. A logical OR is performed on values in the filter. If multiple filters are defined, a logical AND is performed on all filters.
+
+ :param prefix_match: Optional. Filters the results to replicate only blobs whose names begin
+ with the specified prefix.
+ :type prefix_match: list[str]
+ :param min_creation_time: Blobs created after the time will be replicated to the destination.
+ It must be in datetime format 'yyyy-MM-ddTHH:mm:ssZ'. Example: 2020-02-19T16:05:00Z.
+ :type min_creation_time: str
+ """
+
+ _attribute_map = {
+ 'prefix_match': {'key': 'prefixMatch', 'type': '[str]'},
+ 'min_creation_time': {'key': 'minCreationTime', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ObjectReplicationPolicyFilter, self).__init__(**kwargs)
+ self.prefix_match = kwargs.get('prefix_match', None)
+ self.min_creation_time = kwargs.get('min_creation_time', None)
+
+
+class ObjectReplicationPolicyRule(msrest.serialization.Model):
+ """The replication policy rule between two containers.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param rule_id: Rule Id is auto-generated for each new rule on destination account. It is
+ required for put policy on source account.
+ :type rule_id: str
+ :param source_container: Required. Source container name.
+ :type source_container: str
+ :param destination_container: Required. Destination container name.
+ :type destination_container: str
+ :param filters: Optional. An object that defines the filter set.
+ :type filters: ~azure.mgmt.storage.v2021_06_01.models.ObjectReplicationPolicyFilter + """ + + _validation = { + 'source_container': {'required': True}, + 'destination_container': {'required': True}, + } + + _attribute_map = { + 'rule_id': {'key': 'ruleId', 'type': 'str'}, + 'source_container': {'key': 'sourceContainer', 'type': 'str'}, + 'destination_container': {'key': 'destinationContainer', 'type': 'str'}, + 'filters': {'key': 'filters', 'type': 'ObjectReplicationPolicyFilter'}, + } + + def __init__( + self, + **kwargs + ): + super(ObjectReplicationPolicyRule, self).__init__(**kwargs) + self.rule_id = kwargs.get('rule_id', None) + self.source_container = kwargs['source_container'] + self.destination_container = kwargs['destination_container'] + self.filters = kwargs.get('filters', None) + + +class Operation(msrest.serialization.Model): + """Storage REST API operation definition. + + :param name: Operation name: {provider}/{resource}/{operation}. + :type name: str + :param display: Display metadata associated with the operation. + :type display: ~azure.mgmt.storage.v2021_06_01.models.OperationDisplay + :param origin: The origin of operations. + :type origin: str + :param service_specification: One property of operation, include metric specifications. + :type service_specification: ~azure.mgmt.storage.v2021_06_01.models.ServiceSpecification + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display': {'key': 'display', 'type': 'OperationDisplay'}, + 'origin': {'key': 'origin', 'type': 'str'}, + 'service_specification': {'key': 'properties.serviceSpecification', 'type': 'ServiceSpecification'}, + } + + def __init__( + self, + **kwargs + ): + super(Operation, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.display = kwargs.get('display', None) + self.origin = kwargs.get('origin', None) + self.service_specification = kwargs.get('service_specification', None) + + +class OperationDisplay(msrest.serialization.Model): + """Display metadata associated with the operation. + + :param provider: Service provider: Microsoft Storage. + :type provider: str + :param resource: Resource on which the operation is performed etc. + :type resource: str + :param operation: Type of operation: get, read, delete, etc. + :type operation: str + :param description: Description of the operation. + :type description: str + """ + + _attribute_map = { + 'provider': {'key': 'provider', 'type': 'str'}, + 'resource': {'key': 'resource', 'type': 'str'}, + 'operation': {'key': 'operation', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(OperationDisplay, self).__init__(**kwargs) + self.provider = kwargs.get('provider', None) + self.resource = kwargs.get('resource', None) + self.operation = kwargs.get('operation', None) + self.description = kwargs.get('description', None) + + +class OperationListResult(msrest.serialization.Model): + """Result of the request to list Storage operations. It contains a list of operations and a URL link to get the next set of results. + + :param value: List of Storage operations supported by the Storage resource provider. 
+ :type value: list[~azure.mgmt.storage.v2021_06_01.models.Operation] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Operation]'}, + } + + def __init__( + self, + **kwargs + ): + super(OperationListResult, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + + +class PrivateEndpoint(msrest.serialization.Model): + """The Private Endpoint resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: The ARM identifier for Private Endpoint. + :vartype id: str + """ + + _validation = { + 'id': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateEndpoint, self).__init__(**kwargs) + self.id = None + + +class PrivateEndpointConnection(Resource): + """The Private Endpoint Connection resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :param private_endpoint: The resource of private end point. + :type private_endpoint: ~azure.mgmt.storage.v2021_06_01.models.PrivateEndpoint + :param private_link_service_connection_state: A collection of information about the state of + the connection between service consumer and provider. + :type private_link_service_connection_state: + ~azure.mgmt.storage.v2021_06_01.models.PrivateLinkServiceConnectionState + :ivar provisioning_state: The provisioning state of the private endpoint connection resource. + Possible values include: "Succeeded", "Creating", "Deleting", "Failed". + :vartype provisioning_state: str or + ~azure.mgmt.storage.v2021_06_01.models.PrivateEndpointConnectionProvisioningState + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'}, + 'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateEndpointConnection, self).__init__(**kwargs) + self.private_endpoint = kwargs.get('private_endpoint', None) + self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None) + self.provisioning_state = None + + +class PrivateEndpointConnectionListResult(msrest.serialization.Model): + """List of private endpoint connection associated with the specified storage account. + + :param value: Array of private endpoint connections. 
+ :type value: list[~azure.mgmt.storage.v2021_06_01.models.PrivateEndpointConnection] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PrivateEndpointConnection]'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateEndpointConnectionListResult, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + + +class PrivateLinkResource(Resource): + """A private link resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar group_id: The private link resource group id. + :vartype group_id: str + :ivar required_members: The private link resource required member names. + :vartype required_members: list[str] + :param required_zone_names: The private link resource Private link DNS zone name. + :type required_zone_names: list[str] + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'group_id': {'readonly': True}, + 'required_members': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'group_id': {'key': 'properties.groupId', 'type': 'str'}, + 'required_members': {'key': 'properties.requiredMembers', 'type': '[str]'}, + 'required_zone_names': {'key': 'properties.requiredZoneNames', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateLinkResource, self).__init__(**kwargs) + self.group_id = None + self.required_members = None + self.required_zone_names = kwargs.get('required_zone_names', None) + + +class PrivateLinkResourceListResult(msrest.serialization.Model): + """A list of private link resources. + + :param value: Array of private link resources. + :type value: list[~azure.mgmt.storage.v2021_06_01.models.PrivateLinkResource] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PrivateLinkResource]'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateLinkResourceListResult, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + + +class PrivateLinkServiceConnectionState(msrest.serialization.Model): + """A collection of information about the state of the connection between service consumer and provider. + + :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner + of the service. Possible values include: "Pending", "Approved", "Rejected". + :type status: str or + ~azure.mgmt.storage.v2021_06_01.models.PrivateEndpointServiceConnectionStatus + :param description: The reason for approval/rejection of the connection. + :type description: str + :param action_required: A message indicating if changes on the service provider require any + updates on the consumer. 
+ :type action_required: str + """ + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'action_required': {'key': 'actionRequired', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateLinkServiceConnectionState, self).__init__(**kwargs) + self.status = kwargs.get('status', None) + self.description = kwargs.get('description', None) + self.action_required = kwargs.get('action_required', None) + + +class ProtectedAppendWritesHistory(msrest.serialization.Model): + """Protected append writes history setting for the blob container with Legal holds. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param allow_protected_append_writes_all: When enabled, new blocks can be written to both + 'Append and Bock Blobs' while maintaining legal hold protection and compliance. Only new blocks + can be added and any existing blocks cannot be modified or deleted. + :type allow_protected_append_writes_all: bool + :ivar timestamp: Returns the date and time the tag was added. + :vartype timestamp: ~datetime.datetime + """ + + _validation = { + 'timestamp': {'readonly': True}, + } + + _attribute_map = { + 'allow_protected_append_writes_all': {'key': 'allowProtectedAppendWritesAll', 'type': 'bool'}, + 'timestamp': {'key': 'timestamp', 'type': 'iso-8601'}, + } + + def __init__( + self, + **kwargs + ): + super(ProtectedAppendWritesHistory, self).__init__(**kwargs) + self.allow_protected_append_writes_all = kwargs.get('allow_protected_append_writes_all', None) + self.timestamp = None + + +class ProtocolSettings(msrest.serialization.Model): + """Protocol settings for file service. + + :param smb: Setting for SMB protocol. + :type smb: ~azure.mgmt.storage.v2021_06_01.models.SmbSetting + """ + + _attribute_map = { + 'smb': {'key': 'smb', 'type': 'SmbSetting'}, + } + + def __init__( + self, + **kwargs + ): + super(ProtocolSettings, self).__init__(**kwargs) + self.smb = kwargs.get('smb', None) + + +class QueueServiceProperties(Resource): + """The properties of a storage account’s Queue service. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :param cors: Specifies CORS rules for the Queue service. You can include up to five CorsRule + elements in the request. If no CorsRule elements are included in the request body, all CORS + rules will be deleted, and CORS will be disabled for the Queue service. + :type cors: ~azure.mgmt.storage.v2021_06_01.models.CorsRules + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'cors': {'key': 'properties.cors', 'type': 'CorsRules'}, + } + + def __init__( + self, + **kwargs + ): + super(QueueServiceProperties, self).__init__(**kwargs) + self.cors = kwargs.get('cors', None) + + +class ResourceAccessRule(msrest.serialization.Model): + """Resource Access Rule. 
+
+
+class ResourceAccessRule(msrest.serialization.Model):
+    """Resource Access Rule.
+
+    :param tenant_id: Tenant Id.
+    :type tenant_id: str
+    :param resource_id: Resource Id.
+    :type resource_id: str
+    """
+
+    _attribute_map = {
+        'tenant_id': {'key': 'tenantId', 'type': 'str'},
+        'resource_id': {'key': 'resourceId', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ResourceAccessRule, self).__init__(**kwargs)
+        self.tenant_id = kwargs.get('tenant_id', None)
+        self.resource_id = kwargs.get('resource_id', None)
+
+
+class RestorePolicyProperties(msrest.serialization.Model):
+    """The blob service properties for blob restore policy.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param enabled: Required. Blob restore is enabled if set to true.
+    :type enabled: bool
+    :param days: How long this blob can be restored. It should be greater than zero and less than
+     DeleteRetentionPolicy.days.
+    :type days: int
+    :ivar last_enabled_time: Deprecated in favor of minRestoreTime property.
+    :vartype last_enabled_time: ~datetime.datetime
+    :ivar min_restore_time: Returns the minimum date and time that the restore can be started.
+    :vartype min_restore_time: ~datetime.datetime
+    """
+
+    _validation = {
+        'enabled': {'required': True},
+        'days': {'maximum': 365, 'minimum': 1},
+        'last_enabled_time': {'readonly': True},
+        'min_restore_time': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'enabled': {'key': 'enabled', 'type': 'bool'},
+        'days': {'key': 'days', 'type': 'int'},
+        'last_enabled_time': {'key': 'lastEnabledTime', 'type': 'iso-8601'},
+        'min_restore_time': {'key': 'minRestoreTime', 'type': 'iso-8601'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(RestorePolicyProperties, self).__init__(**kwargs)
+        self.enabled = kwargs['enabled']
+        self.days = kwargs.get('days', None)
+        self.last_enabled_time = None
+        self.min_restore_time = None
+
+
+class Restriction(msrest.serialization.Model):
+    """The restriction because of which the SKU cannot be used.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar type: The type of restrictions. As of now the only possible value for this is location.
+    :vartype type: str
+    :ivar values: The value of restrictions. If the restriction type is set to location, this
+     would be the different locations where the SKU is restricted.
+    :vartype values: list[str]
+    :param reason_code: The reason for the restriction. As of now this can be "QuotaId" or
+     "NotAvailableForSubscription". Quota Id is set when the SKU has the requiredQuotas parameter
+     and the subscription does not belong to that quota. "NotAvailableForSubscription" is related
+     to capacity at the datacenter. Possible values include: "QuotaId",
+     "NotAvailableForSubscription".
+    :type reason_code: str or ~azure.mgmt.storage.v2021_06_01.models.ReasonCode
+    """
+
+    _validation = {
+        'type': {'readonly': True},
+        'values': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'values': {'key': 'values', 'type': '[str]'},
+        'reason_code': {'key': 'reasonCode', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(Restriction, self).__init__(**kwargs)
+        self.type = None
+        self.values = None
+        self.reason_code = kwargs.get('reason_code', None)
+
+
+class RoutingPreference(msrest.serialization.Model):
+    """Routing preference defines the type of network, either Microsoft or internet routing, to be used to deliver the user data; the default option is Microsoft routing.
+ + :param routing_choice: Routing Choice defines the kind of network routing opted by the user. + Possible values include: "MicrosoftRouting", "InternetRouting". + :type routing_choice: str or ~azure.mgmt.storage.v2021_06_01.models.RoutingChoice + :param publish_microsoft_endpoints: A boolean flag which indicates whether microsoft routing + storage endpoints are to be published. + :type publish_microsoft_endpoints: bool + :param publish_internet_endpoints: A boolean flag which indicates whether internet routing + storage endpoints are to be published. + :type publish_internet_endpoints: bool + """ + + _attribute_map = { + 'routing_choice': {'key': 'routingChoice', 'type': 'str'}, + 'publish_microsoft_endpoints': {'key': 'publishMicrosoftEndpoints', 'type': 'bool'}, + 'publish_internet_endpoints': {'key': 'publishInternetEndpoints', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(RoutingPreference, self).__init__(**kwargs) + self.routing_choice = kwargs.get('routing_choice', None) + self.publish_microsoft_endpoints = kwargs.get('publish_microsoft_endpoints', None) + self.publish_internet_endpoints = kwargs.get('publish_internet_endpoints', None) + + +class SasPolicy(msrest.serialization.Model): + """SasPolicy assigned to the storage account. + + All required parameters must be populated in order to send to Azure. + + :param sas_expiration_period: Required. The SAS expiration period, DD.HH:MM:SS. + :type sas_expiration_period: str + :param expiration_action: Required. The SAS expiration action. Can only be Log. Possible values + include: "Log". Default value: "Log". + :type expiration_action: str or ~azure.mgmt.storage.v2021_06_01.models.ExpirationAction + """ + + _validation = { + 'sas_expiration_period': {'required': True}, + 'expiration_action': {'required': True}, + } + + _attribute_map = { + 'sas_expiration_period': {'key': 'sasExpirationPeriod', 'type': 'str'}, + 'expiration_action': {'key': 'expirationAction', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SasPolicy, self).__init__(**kwargs) + self.sas_expiration_period = kwargs['sas_expiration_period'] + self.expiration_action = kwargs.get('expiration_action', "Log") + + +class ServiceSasParameters(msrest.serialization.Model): + """The parameters to list service SAS credentials of a specific resource. + + All required parameters must be populated in order to send to Azure. + + :param canonicalized_resource: Required. The canonical path to the signed resource. + :type canonicalized_resource: str + :param resource: The signed services accessible with the service SAS. Possible values include: + Blob (b), Container (c), File (f), Share (s). Possible values include: "b", "c", "f", "s". + :type resource: str or ~azure.mgmt.storage.v2021_06_01.models.SignedResource + :param permissions: The signed permissions for the service SAS. Possible values include: Read + (r), Write (w), Delete (d), List (l), Add (a), Create (c), Update (u) and Process (p). Possible + values include: "r", "d", "w", "l", "a", "c", "u", "p". + :type permissions: str or ~azure.mgmt.storage.v2021_06_01.models.Permissions + :param ip_address_or_range: An IP address or a range of IP addresses from which to accept + requests. + :type ip_address_or_range: str + :param protocols: The protocol permitted for a request made with the account SAS. Possible + values include: "https,http", "https". 
+ :type protocols: str or ~azure.mgmt.storage.v2021_06_01.models.HttpProtocol + :param shared_access_start_time: The time at which the SAS becomes valid. + :type shared_access_start_time: ~datetime.datetime + :param shared_access_expiry_time: The time at which the shared access signature becomes + invalid. + :type shared_access_expiry_time: ~datetime.datetime + :param identifier: A unique value up to 64 characters in length that correlates to an access + policy specified for the container, queue, or table. + :type identifier: str + :param partition_key_start: The start of partition key. + :type partition_key_start: str + :param partition_key_end: The end of partition key. + :type partition_key_end: str + :param row_key_start: The start of row key. + :type row_key_start: str + :param row_key_end: The end of row key. + :type row_key_end: str + :param key_to_sign: The key to sign the account SAS token with. + :type key_to_sign: str + :param cache_control: The response header override for cache control. + :type cache_control: str + :param content_disposition: The response header override for content disposition. + :type content_disposition: str + :param content_encoding: The response header override for content encoding. + :type content_encoding: str + :param content_language: The response header override for content language. + :type content_language: str + :param content_type: The response header override for content type. + :type content_type: str + """ + + _validation = { + 'canonicalized_resource': {'required': True}, + 'identifier': {'max_length': 64, 'min_length': 0}, + } + + _attribute_map = { + 'canonicalized_resource': {'key': 'canonicalizedResource', 'type': 'str'}, + 'resource': {'key': 'signedResource', 'type': 'str'}, + 'permissions': {'key': 'signedPermission', 'type': 'str'}, + 'ip_address_or_range': {'key': 'signedIp', 'type': 'str'}, + 'protocols': {'key': 'signedProtocol', 'type': 'str'}, + 'shared_access_start_time': {'key': 'signedStart', 'type': 'iso-8601'}, + 'shared_access_expiry_time': {'key': 'signedExpiry', 'type': 'iso-8601'}, + 'identifier': {'key': 'signedIdentifier', 'type': 'str'}, + 'partition_key_start': {'key': 'startPk', 'type': 'str'}, + 'partition_key_end': {'key': 'endPk', 'type': 'str'}, + 'row_key_start': {'key': 'startRk', 'type': 'str'}, + 'row_key_end': {'key': 'endRk', 'type': 'str'}, + 'key_to_sign': {'key': 'keyToSign', 'type': 'str'}, + 'cache_control': {'key': 'rscc', 'type': 'str'}, + 'content_disposition': {'key': 'rscd', 'type': 'str'}, + 'content_encoding': {'key': 'rsce', 'type': 'str'}, + 'content_language': {'key': 'rscl', 'type': 'str'}, + 'content_type': {'key': 'rsct', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ServiceSasParameters, self).__init__(**kwargs) + self.canonicalized_resource = kwargs['canonicalized_resource'] + self.resource = kwargs.get('resource', None) + self.permissions = kwargs.get('permissions', None) + self.ip_address_or_range = kwargs.get('ip_address_or_range', None) + self.protocols = kwargs.get('protocols', None) + self.shared_access_start_time = kwargs.get('shared_access_start_time', None) + self.shared_access_expiry_time = kwargs.get('shared_access_expiry_time', None) + self.identifier = kwargs.get('identifier', None) + self.partition_key_start = kwargs.get('partition_key_start', None) + self.partition_key_end = kwargs.get('partition_key_end', None) + self.row_key_start = kwargs.get('row_key_start', None) + self.row_key_end = kwargs.get('row_key_end', None) + self.key_to_sign = 
kwargs.get('key_to_sign', None) + self.cache_control = kwargs.get('cache_control', None) + self.content_disposition = kwargs.get('content_disposition', None) + self.content_encoding = kwargs.get('content_encoding', None) + self.content_language = kwargs.get('content_language', None) + self.content_type = kwargs.get('content_type', None) + + +class ServiceSpecification(msrest.serialization.Model): + """One property of operation, include metric specifications. + + :param metric_specifications: Metric specifications of operation. + :type metric_specifications: list[~azure.mgmt.storage.v2021_06_01.models.MetricSpecification] + """ + + _attribute_map = { + 'metric_specifications': {'key': 'metricSpecifications', 'type': '[MetricSpecification]'}, + } + + def __init__( + self, + **kwargs + ): + super(ServiceSpecification, self).__init__(**kwargs) + self.metric_specifications = kwargs.get('metric_specifications', None) + + +class SignedIdentifier(msrest.serialization.Model): + """SignedIdentifier. + + :param id: An unique identifier of the stored access policy. + :type id: str + :param access_policy: Access policy. + :type access_policy: ~azure.mgmt.storage.v2021_06_01.models.AccessPolicy + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'access_policy': {'key': 'accessPolicy', 'type': 'AccessPolicy'}, + } + + def __init__( + self, + **kwargs + ): + super(SignedIdentifier, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.access_policy = kwargs.get('access_policy', None) + + +class Sku(msrest.serialization.Model): + """The SKU of the storage account. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. The SKU name. Required for account creation; optional for update. Note + that in older versions, SKU name was called accountType. Possible values include: + "Standard_LRS", "Standard_GRS", "Standard_RAGRS", "Standard_ZRS", "Premium_LRS", "Premium_ZRS", + "Standard_GZRS", "Standard_RAGZRS". + :type name: str or ~azure.mgmt.storage.v2021_06_01.models.SkuName + :ivar tier: The SKU tier. This is based on the SKU name. Possible values include: "Standard", + "Premium". + :vartype tier: str or ~azure.mgmt.storage.v2021_06_01.models.SkuTier + """ + + _validation = { + 'name': {'required': True}, + 'tier': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'tier': {'key': 'tier', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Sku, self).__init__(**kwargs) + self.name = kwargs['name'] + self.tier = None + + +class SKUCapability(msrest.serialization.Model): + """The capability information in the specified SKU, including file encryption, network ACLs, change notification, etc. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: The name of capability, The capability information in the specified SKU, including + file encryption, network ACLs, change notification, etc. + :vartype name: str + :ivar value: A string value to indicate states of given capability. Possibly 'true' or 'false'. 
+ :vartype value: str + """ + + _validation = { + 'name': {'readonly': True}, + 'value': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SKUCapability, self).__init__(**kwargs) + self.name = None + self.value = None + + +class SkuInformation(msrest.serialization.Model): + """Storage SKU and its properties. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. The SKU name. Required for account creation; optional for update. Note + that in older versions, SKU name was called accountType. Possible values include: + "Standard_LRS", "Standard_GRS", "Standard_RAGRS", "Standard_ZRS", "Premium_LRS", "Premium_ZRS", + "Standard_GZRS", "Standard_RAGZRS". + :type name: str or ~azure.mgmt.storage.v2021_06_01.models.SkuName + :ivar tier: The SKU tier. This is based on the SKU name. Possible values include: "Standard", + "Premium". + :vartype tier: str or ~azure.mgmt.storage.v2021_06_01.models.SkuTier + :ivar resource_type: The type of the resource, usually it is 'storageAccounts'. + :vartype resource_type: str + :ivar kind: Indicates the type of storage account. Possible values include: "Storage", + "StorageV2", "BlobStorage", "FileStorage", "BlockBlobStorage". + :vartype kind: str or ~azure.mgmt.storage.v2021_06_01.models.Kind + :ivar locations: The set of locations that the SKU is available. This will be supported and + registered Azure Geo Regions (e.g. West US, East US, Southeast Asia, etc.). + :vartype locations: list[str] + :ivar capabilities: The capability information in the specified SKU, including file encryption, + network ACLs, change notification, etc. + :vartype capabilities: list[~azure.mgmt.storage.v2021_06_01.models.SKUCapability] + :param restrictions: The restrictions because of which SKU cannot be used. This is empty if + there are no restrictions. + :type restrictions: list[~azure.mgmt.storage.v2021_06_01.models.Restriction] + """ + + _validation = { + 'name': {'required': True}, + 'tier': {'readonly': True}, + 'resource_type': {'readonly': True}, + 'kind': {'readonly': True}, + 'locations': {'readonly': True}, + 'capabilities': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'tier': {'key': 'tier', 'type': 'str'}, + 'resource_type': {'key': 'resourceType', 'type': 'str'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'locations': {'key': 'locations', 'type': '[str]'}, + 'capabilities': {'key': 'capabilities', 'type': '[SKUCapability]'}, + 'restrictions': {'key': 'restrictions', 'type': '[Restriction]'}, + } + + def __init__( + self, + **kwargs + ): + super(SkuInformation, self).__init__(**kwargs) + self.name = kwargs['name'] + self.tier = None + self.resource_type = None + self.kind = None + self.locations = None + self.capabilities = None + self.restrictions = kwargs.get('restrictions', None) + + +class SmbSetting(msrest.serialization.Model): + """Setting for SMB protocol. + + :param multichannel: Multichannel setting. Applies to Premium FileStorage only. + :type multichannel: ~azure.mgmt.storage.v2021_06_01.models.Multichannel + :param versions: SMB protocol versions supported by server. Valid values are SMB2.1, SMB3.0, + SMB3.1.1. Should be passed as a string with delimiter ';'. 
+ :type versions: str + :param authentication_methods: SMB authentication methods supported by server. Valid values are + NTLMv2, Kerberos. Should be passed as a string with delimiter ';'. + :type authentication_methods: str + :param kerberos_ticket_encryption: Kerberos ticket encryption supported by server. Valid values + are RC4-HMAC, AES-256. Should be passed as a string with delimiter ';'. + :type kerberos_ticket_encryption: str + :param channel_encryption: SMB channel encryption supported by server. Valid values are + AES-128-CCM, AES-128-GCM, AES-256-GCM. Should be passed as a string with delimiter ';'. + :type channel_encryption: str + """ + + _attribute_map = { + 'multichannel': {'key': 'multichannel', 'type': 'Multichannel'}, + 'versions': {'key': 'versions', 'type': 'str'}, + 'authentication_methods': {'key': 'authenticationMethods', 'type': 'str'}, + 'kerberos_ticket_encryption': {'key': 'kerberosTicketEncryption', 'type': 'str'}, + 'channel_encryption': {'key': 'channelEncryption', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SmbSetting, self).__init__(**kwargs) + self.multichannel = kwargs.get('multichannel', None) + self.versions = kwargs.get('versions', None) + self.authentication_methods = kwargs.get('authentication_methods', None) + self.kerberos_ticket_encryption = kwargs.get('kerberos_ticket_encryption', None) + self.channel_encryption = kwargs.get('channel_encryption', None) + + +class TrackedResource(Resource): + """The resource model definition for an Azure Resource Manager tracked top level resource which has 'tags' and a 'location'. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives. + :type location: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(TrackedResource, self).__init__(**kwargs) + self.tags = kwargs.get('tags', None) + self.location = kwargs['location'] + + +class StorageAccount(TrackedResource): + """The storage account. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. 
"Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives. + :type location: str + :ivar sku: Gets the SKU. + :vartype sku: ~azure.mgmt.storage.v2021_06_01.models.Sku + :ivar kind: Gets the Kind. Possible values include: "Storage", "StorageV2", "BlobStorage", + "FileStorage", "BlockBlobStorage". + :vartype kind: str or ~azure.mgmt.storage.v2021_06_01.models.Kind + :param identity: The identity of the resource. + :type identity: ~azure.mgmt.storage.v2021_06_01.models.Identity + :param extended_location: The extendedLocation of the resource. + :type extended_location: ~azure.mgmt.storage.v2021_06_01.models.ExtendedLocation + :ivar provisioning_state: Gets the status of the storage account at the time the operation was + called. Possible values include: "Creating", "ResolvingDNS", "Succeeded". + :vartype provisioning_state: str or ~azure.mgmt.storage.v2021_06_01.models.ProvisioningState + :ivar primary_endpoints: Gets the URLs that are used to perform a retrieval of a public blob, + queue, or table object. Note that Standard_ZRS and Premium_LRS accounts only return the blob + endpoint. + :vartype primary_endpoints: ~azure.mgmt.storage.v2021_06_01.models.Endpoints + :ivar primary_location: Gets the location of the primary data center for the storage account. + :vartype primary_location: str + :ivar status_of_primary: Gets the status indicating whether the primary location of the storage + account is available or unavailable. Possible values include: "available", "unavailable". + :vartype status_of_primary: str or ~azure.mgmt.storage.v2021_06_01.models.AccountStatus + :ivar last_geo_failover_time: Gets the timestamp of the most recent instance of a failover to + the secondary location. Only the most recent timestamp is retained. This element is not + returned if there has never been a failover instance. Only available if the accountType is + Standard_GRS or Standard_RAGRS. + :vartype last_geo_failover_time: ~datetime.datetime + :ivar secondary_location: Gets the location of the geo-replicated secondary for the storage + account. Only available if the accountType is Standard_GRS or Standard_RAGRS. + :vartype secondary_location: str + :ivar status_of_secondary: Gets the status indicating whether the secondary location of the + storage account is available or unavailable. Only available if the SKU name is Standard_GRS or + Standard_RAGRS. Possible values include: "available", "unavailable". + :vartype status_of_secondary: str or ~azure.mgmt.storage.v2021_06_01.models.AccountStatus + :ivar creation_time: Gets the creation date and time of the storage account in UTC. + :vartype creation_time: ~datetime.datetime + :ivar custom_domain: Gets the custom domain the user assigned to this storage account. + :vartype custom_domain: ~azure.mgmt.storage.v2021_06_01.models.CustomDomain + :ivar sas_policy: SasPolicy assigned to the storage account. + :vartype sas_policy: ~azure.mgmt.storage.v2021_06_01.models.SasPolicy + :ivar key_policy: KeyPolicy assigned to the storage account. + :vartype key_policy: ~azure.mgmt.storage.v2021_06_01.models.KeyPolicy + :ivar key_creation_time: Storage account keys creation time. 
+    :vartype key_creation_time: ~azure.mgmt.storage.v2021_06_01.models.KeyCreationTime
+    :ivar secondary_endpoints: Gets the URLs that are used to perform a retrieval of a public
+     blob, queue, or table object from the secondary location of the storage account. Only
+     available if the SKU name is Standard_RAGRS.
+    :vartype secondary_endpoints: ~azure.mgmt.storage.v2021_06_01.models.Endpoints
+    :ivar encryption: Gets the encryption settings on the account. If unspecified, the account is
+     unencrypted.
+    :vartype encryption: ~azure.mgmt.storage.v2021_06_01.models.Encryption
+    :ivar access_tier: Required for storage accounts where kind = BlobStorage. The access tier
+     used for billing. Possible values include: "Hot", "Cool".
+    :vartype access_tier: str or ~azure.mgmt.storage.v2021_06_01.models.AccessTier
+    :param azure_files_identity_based_authentication: Provides the identity based authentication
+     settings for Azure Files.
+    :type azure_files_identity_based_authentication:
+     ~azure.mgmt.storage.v2021_06_01.models.AzureFilesIdentityBasedAuthentication
+    :param enable_https_traffic_only: Allows https traffic only to storage service if set to
+     true.
+    :type enable_https_traffic_only: bool
+    :ivar network_rule_set: Network rule set.
+    :vartype network_rule_set: ~azure.mgmt.storage.v2021_06_01.models.NetworkRuleSet
+    :param is_hns_enabled: Account HierarchicalNamespace enabled if set to true.
+    :type is_hns_enabled: bool
+    :ivar geo_replication_stats: Geo Replication Stats.
+    :vartype geo_replication_stats: ~azure.mgmt.storage.v2021_06_01.models.GeoReplicationStats
+    :ivar failover_in_progress: If the failover is in progress, the value will be true; otherwise
+     it will be null.
+    :vartype failover_in_progress: bool
+    :param large_file_shares_state: Allow large file shares if set to Enabled. It cannot be
+     disabled once it is enabled. Possible values include: "Disabled", "Enabled".
+    :type large_file_shares_state: str or
+     ~azure.mgmt.storage.v2021_06_01.models.LargeFileSharesState
+    :ivar private_endpoint_connections: List of private endpoint connections associated with the
+     specified storage account.
+    :vartype private_endpoint_connections:
+     list[~azure.mgmt.storage.v2021_06_01.models.PrivateEndpointConnection]
+    :param routing_preference: Maintains information about the network routing choice opted by
+     the user for data transfer.
+    :type routing_preference: ~azure.mgmt.storage.v2021_06_01.models.RoutingPreference
+    :ivar blob_restore_status: Blob restore status.
+    :vartype blob_restore_status: ~azure.mgmt.storage.v2021_06_01.models.BlobRestoreStatus
+    :param allow_blob_public_access: Allow or disallow public access to all blobs or containers
+     in the storage account. The default interpretation is true for this property.
+    :type allow_blob_public_access: bool
+    :param minimum_tls_version: Set the minimum TLS version to be permitted on requests to
+     storage. The default interpretation is TLS 1.0 for this property. Possible values include:
+     "TLS1_0", "TLS1_1", "TLS1_2".
+    :type minimum_tls_version: str or ~azure.mgmt.storage.v2021_06_01.models.MinimumTlsVersion
+    :param allow_shared_key_access: Indicates whether the storage account permits requests to be
+     authorized with the account access key via Shared Key. If false, then all requests, including
+     shared access signatures, must be authorized with Azure Active Directory (Azure AD). The
+     default value is null, which is equivalent to true.
+ :type allow_shared_key_access: bool + :param enable_nfs_v3: NFS 3.0 protocol support enabled if set to true. + :type enable_nfs_v3: bool + :param allow_cross_tenant_replication: Allow or disallow cross AAD tenant object replication. + The default interpretation is true for this property. + :type allow_cross_tenant_replication: bool + :param default_to_o_auth_authentication: A boolean flag which indicates whether the default + authentication is OAuth or not. The default interpretation is false for this property. + :type default_to_o_auth_authentication: bool + :param public_network_access: Allow or disallow public network access to Storage Account. Value + is optional but if passed in, must be 'Enabled' or 'Disabled'. Possible values include: + "Enabled", "Disabled". + :type public_network_access: str or ~azure.mgmt.storage.v2021_06_01.models.PublicNetworkAccess + :param immutable_storage_with_versioning: The property is immutable and can only be set to true + at the account creation time. When set to true, it enables object level immutability for all + the containers in the account by default. + :type immutable_storage_with_versioning: + ~azure.mgmt.storage.v2021_06_01.models.ImmutableStorageAccount + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + 'sku': {'readonly': True}, + 'kind': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'primary_endpoints': {'readonly': True}, + 'primary_location': {'readonly': True}, + 'status_of_primary': {'readonly': True}, + 'last_geo_failover_time': {'readonly': True}, + 'secondary_location': {'readonly': True}, + 'status_of_secondary': {'readonly': True}, + 'creation_time': {'readonly': True}, + 'custom_domain': {'readonly': True}, + 'sas_policy': {'readonly': True}, + 'key_policy': {'readonly': True}, + 'key_creation_time': {'readonly': True}, + 'secondary_endpoints': {'readonly': True}, + 'encryption': {'readonly': True}, + 'access_tier': {'readonly': True}, + 'network_rule_set': {'readonly': True}, + 'geo_replication_stats': {'readonly': True}, + 'failover_in_progress': {'readonly': True}, + 'private_endpoint_connections': {'readonly': True}, + 'blob_restore_status': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'Identity'}, + 'extended_location': {'key': 'extendedLocation', 'type': 'ExtendedLocation'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'primary_endpoints': {'key': 'properties.primaryEndpoints', 'type': 'Endpoints'}, + 'primary_location': {'key': 'properties.primaryLocation', 'type': 'str'}, + 'status_of_primary': {'key': 'properties.statusOfPrimary', 'type': 'str'}, + 'last_geo_failover_time': {'key': 'properties.lastGeoFailoverTime', 'type': 'iso-8601'}, + 'secondary_location': {'key': 'properties.secondaryLocation', 'type': 'str'}, + 'status_of_secondary': {'key': 'properties.statusOfSecondary', 'type': 'str'}, + 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'}, + 'custom_domain': {'key': 'properties.customDomain', 'type': 'CustomDomain'}, + 'sas_policy': {'key': 'properties.sasPolicy', 'type': 'SasPolicy'}, + 'key_policy': 
{'key': 'properties.keyPolicy', 'type': 'KeyPolicy'}, + 'key_creation_time': {'key': 'properties.keyCreationTime', 'type': 'KeyCreationTime'}, + 'secondary_endpoints': {'key': 'properties.secondaryEndpoints', 'type': 'Endpoints'}, + 'encryption': {'key': 'properties.encryption', 'type': 'Encryption'}, + 'access_tier': {'key': 'properties.accessTier', 'type': 'str'}, + 'azure_files_identity_based_authentication': {'key': 'properties.azureFilesIdentityBasedAuthentication', 'type': 'AzureFilesIdentityBasedAuthentication'}, + 'enable_https_traffic_only': {'key': 'properties.supportsHttpsTrafficOnly', 'type': 'bool'}, + 'network_rule_set': {'key': 'properties.networkAcls', 'type': 'NetworkRuleSet'}, + 'is_hns_enabled': {'key': 'properties.isHnsEnabled', 'type': 'bool'}, + 'geo_replication_stats': {'key': 'properties.geoReplicationStats', 'type': 'GeoReplicationStats'}, + 'failover_in_progress': {'key': 'properties.failoverInProgress', 'type': 'bool'}, + 'large_file_shares_state': {'key': 'properties.largeFileSharesState', 'type': 'str'}, + 'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'}, + 'routing_preference': {'key': 'properties.routingPreference', 'type': 'RoutingPreference'}, + 'blob_restore_status': {'key': 'properties.blobRestoreStatus', 'type': 'BlobRestoreStatus'}, + 'allow_blob_public_access': {'key': 'properties.allowBlobPublicAccess', 'type': 'bool'}, + 'minimum_tls_version': {'key': 'properties.minimumTlsVersion', 'type': 'str'}, + 'allow_shared_key_access': {'key': 'properties.allowSharedKeyAccess', 'type': 'bool'}, + 'enable_nfs_v3': {'key': 'properties.isNfsV3Enabled', 'type': 'bool'}, + 'allow_cross_tenant_replication': {'key': 'properties.allowCrossTenantReplication', 'type': 'bool'}, + 'default_to_o_auth_authentication': {'key': 'properties.defaultToOAuthAuthentication', 'type': 'bool'}, + 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, + 'immutable_storage_with_versioning': {'key': 'properties.immutableStorageWithVersioning', 'type': 'ImmutableStorageAccount'}, + } + + def __init__( + self, + **kwargs + ): + super(StorageAccount, self).__init__(**kwargs) + self.sku = None + self.kind = None + self.identity = kwargs.get('identity', None) + self.extended_location = kwargs.get('extended_location', None) + self.provisioning_state = None + self.primary_endpoints = None + self.primary_location = None + self.status_of_primary = None + self.last_geo_failover_time = None + self.secondary_location = None + self.status_of_secondary = None + self.creation_time = None + self.custom_domain = None + self.sas_policy = None + self.key_policy = None + self.key_creation_time = None + self.secondary_endpoints = None + self.encryption = None + self.access_tier = None + self.azure_files_identity_based_authentication = kwargs.get('azure_files_identity_based_authentication', None) + self.enable_https_traffic_only = kwargs.get('enable_https_traffic_only', None) + self.network_rule_set = None + self.is_hns_enabled = kwargs.get('is_hns_enabled', None) + self.geo_replication_stats = None + self.failover_in_progress = None + self.large_file_shares_state = kwargs.get('large_file_shares_state', None) + self.private_endpoint_connections = None + self.routing_preference = kwargs.get('routing_preference', None) + self.blob_restore_status = None + self.allow_blob_public_access = kwargs.get('allow_blob_public_access', None) + self.minimum_tls_version = kwargs.get('minimum_tls_version', None) + 
self.allow_shared_key_access = kwargs.get('allow_shared_key_access', None) + self.enable_nfs_v3 = kwargs.get('enable_nfs_v3', None) + self.allow_cross_tenant_replication = kwargs.get('allow_cross_tenant_replication', None) + self.default_to_o_auth_authentication = kwargs.get('default_to_o_auth_authentication', None) + self.public_network_access = kwargs.get('public_network_access', None) + self.immutable_storage_with_versioning = kwargs.get('immutable_storage_with_versioning', None) + + +class StorageAccountCheckNameAvailabilityParameters(msrest.serialization.Model): + """The parameters used to check the availability of the storage account name. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. The storage account name. + :type name: str + :ivar type: The type of resource, Microsoft.Storage/storageAccounts. Has constant value: + "Microsoft.Storage/storageAccounts". + :vartype type: str + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + type = "Microsoft.Storage/storageAccounts" + + def __init__( + self, + **kwargs + ): + super(StorageAccountCheckNameAvailabilityParameters, self).__init__(**kwargs) + self.name = kwargs['name'] + + +class StorageAccountCreateParameters(msrest.serialization.Model): + """The parameters used when creating a storage account. + + All required parameters must be populated in order to send to Azure. + + :param sku: Required. Required. Gets or sets the SKU name. + :type sku: ~azure.mgmt.storage.v2021_06_01.models.Sku + :param kind: Required. Required. Indicates the type of storage account. Possible values + include: "Storage", "StorageV2", "BlobStorage", "FileStorage", "BlockBlobStorage". + :type kind: str or ~azure.mgmt.storage.v2021_06_01.models.Kind + :param location: Required. Required. Gets or sets the location of the resource. This will be + one of the supported and registered Azure Geo Regions (e.g. West US, East US, Southeast Asia, + etc.). The geo region of a resource cannot be changed once it is created, but if an identical + geo region is specified on update, the request will succeed. + :type location: str + :param extended_location: Optional. Set the extended location of the resource. If not set, the + storage account will be created in Azure main region. Otherwise it will be created in the + specified extended location. + :type extended_location: ~azure.mgmt.storage.v2021_06_01.models.ExtendedLocation + :param tags: A set of tags. Gets or sets a list of key value pairs that describe the resource. + These tags can be used for viewing and grouping this resource (across resource groups). A + maximum of 15 tags can be provided for a resource. Each tag must have a key with a length no + greater than 128 characters and a value with a length no greater than 256 characters. + :type tags: dict[str, str] + :param identity: The identity of the resource. + :type identity: ~azure.mgmt.storage.v2021_06_01.models.Identity + :param public_network_access: Allow or disallow public network access to Storage Account. Value + is optional but if passed in, must be 'Enabled' or 'Disabled'. Possible values include: + "Enabled", "Disabled". 
+ :type public_network_access: str or ~azure.mgmt.storage.v2021_06_01.models.PublicNetworkAccess + :param sas_policy: SasPolicy assigned to the storage account. + :type sas_policy: ~azure.mgmt.storage.v2021_06_01.models.SasPolicy + :param key_policy: KeyPolicy assigned to the storage account. + :type key_policy: ~azure.mgmt.storage.v2021_06_01.models.KeyPolicy + :param custom_domain: User domain assigned to the storage account. Name is the CNAME source. + Only one custom domain is supported per storage account at this time. To clear the existing + custom domain, use an empty string for the custom domain name property. + :type custom_domain: ~azure.mgmt.storage.v2021_06_01.models.CustomDomain + :param encryption: Not applicable. Azure Storage encryption is enabled for all storage accounts + and cannot be disabled. + :type encryption: ~azure.mgmt.storage.v2021_06_01.models.Encryption + :param network_rule_set: Network rule set. + :type network_rule_set: ~azure.mgmt.storage.v2021_06_01.models.NetworkRuleSet + :param access_tier: Required for storage accounts where kind = BlobStorage. The access tier + used for billing. Possible values include: "Hot", "Cool". + :type access_tier: str or ~azure.mgmt.storage.v2021_06_01.models.AccessTier + :param azure_files_identity_based_authentication: Provides the identity based authentication + settings for Azure Files. + :type azure_files_identity_based_authentication: + ~azure.mgmt.storage.v2021_06_01.models.AzureFilesIdentityBasedAuthentication + :param enable_https_traffic_only: Allows https traffic only to storage service if sets to true. + The default value is true since API version 2019-04-01. + :type enable_https_traffic_only: bool + :param is_hns_enabled: Account HierarchicalNamespace enabled if sets to true. + :type is_hns_enabled: bool + :param large_file_shares_state: Allow large file shares if sets to Enabled. It cannot be + disabled once it is enabled. Possible values include: "Disabled", "Enabled". + :type large_file_shares_state: str or + ~azure.mgmt.storage.v2021_06_01.models.LargeFileSharesState + :param routing_preference: Maintains information about the network routing choice opted by the + user for data transfer. + :type routing_preference: ~azure.mgmt.storage.v2021_06_01.models.RoutingPreference + :param allow_blob_public_access: Allow or disallow public access to all blobs or containers in + the storage account. The default interpretation is true for this property. + :type allow_blob_public_access: bool + :param minimum_tls_version: Set the minimum TLS version to be permitted on requests to storage. + The default interpretation is TLS 1.0 for this property. Possible values include: "TLS1_0", + "TLS1_1", "TLS1_2". + :type minimum_tls_version: str or ~azure.mgmt.storage.v2021_06_01.models.MinimumTlsVersion + :param allow_shared_key_access: Indicates whether the storage account permits requests to be + authorized with the account access key via Shared Key. If false, then all requests, including + shared access signatures, must be authorized with Azure Active Directory (Azure AD). The + default value is null, which is equivalent to true. + :type allow_shared_key_access: bool + :param enable_nfs_v3: NFS 3.0 protocol support enabled if set to true. + :type enable_nfs_v3: bool + :param allow_cross_tenant_replication: Allow or disallow cross AAD tenant object replication. + The default interpretation is true for this property. 
+ :type allow_cross_tenant_replication: bool + :param default_to_o_auth_authentication: A boolean flag which indicates whether the default + authentication is OAuth or not. The default interpretation is false for this property. + :type default_to_o_auth_authentication: bool + :param immutable_storage_with_versioning: The property is immutable and can only be set to true + at the account creation time. When set to true, it enables object level immutability for all + the new containers in the account by default. + :type immutable_storage_with_versioning: + ~azure.mgmt.storage.v2021_06_01.models.ImmutableStorageAccount + """ + + _validation = { + 'sku': {'required': True}, + 'kind': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'extended_location': {'key': 'extendedLocation', 'type': 'ExtendedLocation'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'identity': {'key': 'identity', 'type': 'Identity'}, + 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, + 'sas_policy': {'key': 'properties.sasPolicy', 'type': 'SasPolicy'}, + 'key_policy': {'key': 'properties.keyPolicy', 'type': 'KeyPolicy'}, + 'custom_domain': {'key': 'properties.customDomain', 'type': 'CustomDomain'}, + 'encryption': {'key': 'properties.encryption', 'type': 'Encryption'}, + 'network_rule_set': {'key': 'properties.networkAcls', 'type': 'NetworkRuleSet'}, + 'access_tier': {'key': 'properties.accessTier', 'type': 'str'}, + 'azure_files_identity_based_authentication': {'key': 'properties.azureFilesIdentityBasedAuthentication', 'type': 'AzureFilesIdentityBasedAuthentication'}, + 'enable_https_traffic_only': {'key': 'properties.supportsHttpsTrafficOnly', 'type': 'bool'}, + 'is_hns_enabled': {'key': 'properties.isHnsEnabled', 'type': 'bool'}, + 'large_file_shares_state': {'key': 'properties.largeFileSharesState', 'type': 'str'}, + 'routing_preference': {'key': 'properties.routingPreference', 'type': 'RoutingPreference'}, + 'allow_blob_public_access': {'key': 'properties.allowBlobPublicAccess', 'type': 'bool'}, + 'minimum_tls_version': {'key': 'properties.minimumTlsVersion', 'type': 'str'}, + 'allow_shared_key_access': {'key': 'properties.allowSharedKeyAccess', 'type': 'bool'}, + 'enable_nfs_v3': {'key': 'properties.isNfsV3Enabled', 'type': 'bool'}, + 'allow_cross_tenant_replication': {'key': 'properties.allowCrossTenantReplication', 'type': 'bool'}, + 'default_to_o_auth_authentication': {'key': 'properties.defaultToOAuthAuthentication', 'type': 'bool'}, + 'immutable_storage_with_versioning': {'key': 'properties.immutableStorageWithVersioning', 'type': 'ImmutableStorageAccount'}, + } + + def __init__( + self, + **kwargs + ): + super(StorageAccountCreateParameters, self).__init__(**kwargs) + self.sku = kwargs['sku'] + self.kind = kwargs['kind'] + self.location = kwargs['location'] + self.extended_location = kwargs.get('extended_location', None) + self.tags = kwargs.get('tags', None) + self.identity = kwargs.get('identity', None) + self.public_network_access = kwargs.get('public_network_access', None) + self.sas_policy = kwargs.get('sas_policy', None) + self.key_policy = kwargs.get('key_policy', None) + self.custom_domain = kwargs.get('custom_domain', None) + self.encryption = kwargs.get('encryption', None) + self.network_rule_set = kwargs.get('network_rule_set', None) + self.access_tier = kwargs.get('access_tier', None) + 
self.azure_files_identity_based_authentication = kwargs.get('azure_files_identity_based_authentication', None) + self.enable_https_traffic_only = kwargs.get('enable_https_traffic_only', None) + self.is_hns_enabled = kwargs.get('is_hns_enabled', None) + self.large_file_shares_state = kwargs.get('large_file_shares_state', None) + self.routing_preference = kwargs.get('routing_preference', None) + self.allow_blob_public_access = kwargs.get('allow_blob_public_access', None) + self.minimum_tls_version = kwargs.get('minimum_tls_version', None) + self.allow_shared_key_access = kwargs.get('allow_shared_key_access', None) + self.enable_nfs_v3 = kwargs.get('enable_nfs_v3', None) + self.allow_cross_tenant_replication = kwargs.get('allow_cross_tenant_replication', None) + self.default_to_o_auth_authentication = kwargs.get('default_to_o_auth_authentication', None) + self.immutable_storage_with_versioning = kwargs.get('immutable_storage_with_versioning', None) + + +class StorageAccountInternetEndpoints(msrest.serialization.Model): + """The URIs that are used to perform a retrieval of a public blob, file, web or dfs object via a internet routing endpoint. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar blob: Gets the blob endpoint. + :vartype blob: str + :ivar file: Gets the file endpoint. + :vartype file: str + :ivar web: Gets the web endpoint. + :vartype web: str + :ivar dfs: Gets the dfs endpoint. + :vartype dfs: str + """ + + _validation = { + 'blob': {'readonly': True}, + 'file': {'readonly': True}, + 'web': {'readonly': True}, + 'dfs': {'readonly': True}, + } + + _attribute_map = { + 'blob': {'key': 'blob', 'type': 'str'}, + 'file': {'key': 'file', 'type': 'str'}, + 'web': {'key': 'web', 'type': 'str'}, + 'dfs': {'key': 'dfs', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(StorageAccountInternetEndpoints, self).__init__(**kwargs) + self.blob = None + self.file = None + self.web = None + self.dfs = None + + +class StorageAccountKey(msrest.serialization.Model): + """An access key for the storage account. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar key_name: Name of the key. + :vartype key_name: str + :ivar value: Base 64-encoded value of the key. + :vartype value: str + :ivar permissions: Permissions for the key -- read-only or full permissions. Possible values + include: "Read", "Full". + :vartype permissions: str or ~azure.mgmt.storage.v2021_06_01.models.KeyPermission + :ivar creation_time: Creation time of the key, in round trip date format. + :vartype creation_time: ~datetime.datetime + """ + + _validation = { + 'key_name': {'readonly': True}, + 'value': {'readonly': True}, + 'permissions': {'readonly': True}, + 'creation_time': {'readonly': True}, + } + + _attribute_map = { + 'key_name': {'key': 'keyName', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + 'permissions': {'key': 'permissions', 'type': 'str'}, + 'creation_time': {'key': 'creationTime', 'type': 'iso-8601'}, + } + + def __init__( + self, + **kwargs + ): + super(StorageAccountKey, self).__init__(**kwargs) + self.key_name = None + self.value = None + self.permissions = None + self.creation_time = None + + +class StorageAccountListKeysResult(msrest.serialization.Model): + """The response from the ListKeys operation. + + Variables are only populated by the server, and will be ignored when sending a request. 
+ + :ivar keys: Gets the list of storage account keys and their properties for the specified + storage account. + :vartype keys: list[~azure.mgmt.storage.v2021_06_01.models.StorageAccountKey] + """ + + _validation = { + 'keys': {'readonly': True}, + } + + _attribute_map = { + 'keys': {'key': 'keys', 'type': '[StorageAccountKey]'}, + } + + def __init__( + self, + **kwargs + ): + super(StorageAccountListKeysResult, self).__init__(**kwargs) + self.keys = None + + +class StorageAccountListResult(msrest.serialization.Model): + """The response from the List Storage Accounts operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: Gets the list of storage accounts and their properties. + :vartype value: list[~azure.mgmt.storage.v2021_06_01.models.StorageAccount] + :ivar next_link: Request URL that can be used to query next page of storage accounts. Returned + when total number of requested storage accounts exceed maximum page size. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[StorageAccount]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(StorageAccountListResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class StorageAccountMicrosoftEndpoints(msrest.serialization.Model): + """The URIs that are used to perform a retrieval of a public blob, queue, table, web or dfs object via a microsoft routing endpoint. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar blob: Gets the blob endpoint. + :vartype blob: str + :ivar queue: Gets the queue endpoint. + :vartype queue: str + :ivar table: Gets the table endpoint. + :vartype table: str + :ivar file: Gets the file endpoint. + :vartype file: str + :ivar web: Gets the web endpoint. + :vartype web: str + :ivar dfs: Gets the dfs endpoint. + :vartype dfs: str + """ + + _validation = { + 'blob': {'readonly': True}, + 'queue': {'readonly': True}, + 'table': {'readonly': True}, + 'file': {'readonly': True}, + 'web': {'readonly': True}, + 'dfs': {'readonly': True}, + } + + _attribute_map = { + 'blob': {'key': 'blob', 'type': 'str'}, + 'queue': {'key': 'queue', 'type': 'str'}, + 'table': {'key': 'table', 'type': 'str'}, + 'file': {'key': 'file', 'type': 'str'}, + 'web': {'key': 'web', 'type': 'str'}, + 'dfs': {'key': 'dfs', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(StorageAccountMicrosoftEndpoints, self).__init__(**kwargs) + self.blob = None + self.queue = None + self.table = None + self.file = None + self.web = None + self.dfs = None + + +class StorageAccountRegenerateKeyParameters(msrest.serialization.Model): + """The parameters used to regenerate the storage account key. + + All required parameters must be populated in order to send to Azure. + + :param key_name: Required. The name of storage keys that want to be regenerated, possible + values are key1, key2, kerb1, kerb2. 
+ :type key_name: str + """ + + _validation = { + 'key_name': {'required': True}, + } + + _attribute_map = { + 'key_name': {'key': 'keyName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(StorageAccountRegenerateKeyParameters, self).__init__(**kwargs) + self.key_name = kwargs['key_name'] + + +class StorageAccountUpdateParameters(msrest.serialization.Model): + """The parameters that can be provided when updating the storage account properties. + + :param sku: Gets or sets the SKU name. Note that the SKU name cannot be updated to + Standard_ZRS, Premium_LRS or Premium_ZRS, nor can accounts of those SKU names be updated to any + other value. + :type sku: ~azure.mgmt.storage.v2021_06_01.models.Sku + :param tags: A set of tags. Gets or sets a list of key value pairs that describe the resource. + These tags can be used in viewing and grouping this resource (across resource groups). A + maximum of 15 tags can be provided for a resource. Each tag must have a key no greater in + length than 128 characters and a value no greater in length than 256 characters. + :type tags: dict[str, str] + :param identity: The identity of the resource. + :type identity: ~azure.mgmt.storage.v2021_06_01.models.Identity + :param kind: Optional. Indicates the type of storage account. Currently only StorageV2 value + supported by server. Possible values include: "Storage", "StorageV2", "BlobStorage", + "FileStorage", "BlockBlobStorage". + :type kind: str or ~azure.mgmt.storage.v2021_06_01.models.Kind + :param custom_domain: Custom domain assigned to the storage account by the user. Name is the + CNAME source. Only one custom domain is supported per storage account at this time. To clear + the existing custom domain, use an empty string for the custom domain name property. + :type custom_domain: ~azure.mgmt.storage.v2021_06_01.models.CustomDomain + :param encryption: Provides the encryption settings on the account. The default setting is + unencrypted. + :type encryption: ~azure.mgmt.storage.v2021_06_01.models.Encryption + :param sas_policy: SasPolicy assigned to the storage account. + :type sas_policy: ~azure.mgmt.storage.v2021_06_01.models.SasPolicy + :param key_policy: KeyPolicy assigned to the storage account. + :type key_policy: ~azure.mgmt.storage.v2021_06_01.models.KeyPolicy + :param access_tier: Required for storage accounts where kind = BlobStorage. The access tier + used for billing. Possible values include: "Hot", "Cool". + :type access_tier: str or ~azure.mgmt.storage.v2021_06_01.models.AccessTier + :param azure_files_identity_based_authentication: Provides the identity based authentication + settings for Azure Files. + :type azure_files_identity_based_authentication: + ~azure.mgmt.storage.v2021_06_01.models.AzureFilesIdentityBasedAuthentication + :param enable_https_traffic_only: Allows https traffic only to storage service if sets to true. + :type enable_https_traffic_only: bool + :param network_rule_set: Network rule set. + :type network_rule_set: ~azure.mgmt.storage.v2021_06_01.models.NetworkRuleSet + :param large_file_shares_state: Allow large file shares if sets to Enabled. It cannot be + disabled once it is enabled. Possible values include: "Disabled", "Enabled". + :type large_file_shares_state: str or + ~azure.mgmt.storage.v2021_06_01.models.LargeFileSharesState + :param routing_preference: Maintains information about the network routing choice opted by the + user for data transfer. 
+ :type routing_preference: ~azure.mgmt.storage.v2021_06_01.models.RoutingPreference + :param allow_blob_public_access: Allow or disallow public access to all blobs or containers in + the storage account. The default interpretation is true for this property. + :type allow_blob_public_access: bool + :param minimum_tls_version: Set the minimum TLS version to be permitted on requests to storage. + The default interpretation is TLS 1.0 for this property. Possible values include: "TLS1_0", + "TLS1_1", "TLS1_2". + :type minimum_tls_version: str or ~azure.mgmt.storage.v2021_06_01.models.MinimumTlsVersion + :param allow_shared_key_access: Indicates whether the storage account permits requests to be + authorized with the account access key via Shared Key. If false, then all requests, including + shared access signatures, must be authorized with Azure Active Directory (Azure AD). The + default value is null, which is equivalent to true. + :type allow_shared_key_access: bool + :param allow_cross_tenant_replication: Allow or disallow cross AAD tenant object replication. + The default interpretation is true for this property. + :type allow_cross_tenant_replication: bool + :param default_to_o_auth_authentication: A boolean flag which indicates whether the default + authentication is OAuth or not. The default interpretation is false for this property. + :type default_to_o_auth_authentication: bool + :param public_network_access: Allow or disallow public network access to Storage Account. Value + is optional but if passed in, must be 'Enabled' or 'Disabled'. Possible values include: + "Enabled", "Disabled". + :type public_network_access: str or ~azure.mgmt.storage.v2021_06_01.models.PublicNetworkAccess + :param immutable_storage_with_versioning: The property is immutable and can only be set to true + at the account creation time. When set to true, it enables object level immutability for all + the containers in the account by default. 
+ :type immutable_storage_with_versioning: + ~azure.mgmt.storage.v2021_06_01.models.ImmutableStorageAccount + """ + + _attribute_map = { + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'identity': {'key': 'identity', 'type': 'Identity'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'custom_domain': {'key': 'properties.customDomain', 'type': 'CustomDomain'}, + 'encryption': {'key': 'properties.encryption', 'type': 'Encryption'}, + 'sas_policy': {'key': 'properties.sasPolicy', 'type': 'SasPolicy'}, + 'key_policy': {'key': 'properties.keyPolicy', 'type': 'KeyPolicy'}, + 'access_tier': {'key': 'properties.accessTier', 'type': 'str'}, + 'azure_files_identity_based_authentication': {'key': 'properties.azureFilesIdentityBasedAuthentication', 'type': 'AzureFilesIdentityBasedAuthentication'}, + 'enable_https_traffic_only': {'key': 'properties.supportsHttpsTrafficOnly', 'type': 'bool'}, + 'network_rule_set': {'key': 'properties.networkAcls', 'type': 'NetworkRuleSet'}, + 'large_file_shares_state': {'key': 'properties.largeFileSharesState', 'type': 'str'}, + 'routing_preference': {'key': 'properties.routingPreference', 'type': 'RoutingPreference'}, + 'allow_blob_public_access': {'key': 'properties.allowBlobPublicAccess', 'type': 'bool'}, + 'minimum_tls_version': {'key': 'properties.minimumTlsVersion', 'type': 'str'}, + 'allow_shared_key_access': {'key': 'properties.allowSharedKeyAccess', 'type': 'bool'}, + 'allow_cross_tenant_replication': {'key': 'properties.allowCrossTenantReplication', 'type': 'bool'}, + 'default_to_o_auth_authentication': {'key': 'properties.defaultToOAuthAuthentication', 'type': 'bool'}, + 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, + 'immutable_storage_with_versioning': {'key': 'properties.immutableStorageWithVersioning', 'type': 'ImmutableStorageAccount'}, + } + + def __init__( + self, + **kwargs + ): + super(StorageAccountUpdateParameters, self).__init__(**kwargs) + self.sku = kwargs.get('sku', None) + self.tags = kwargs.get('tags', None) + self.identity = kwargs.get('identity', None) + self.kind = kwargs.get('kind', None) + self.custom_domain = kwargs.get('custom_domain', None) + self.encryption = kwargs.get('encryption', None) + self.sas_policy = kwargs.get('sas_policy', None) + self.key_policy = kwargs.get('key_policy', None) + self.access_tier = kwargs.get('access_tier', None) + self.azure_files_identity_based_authentication = kwargs.get('azure_files_identity_based_authentication', None) + self.enable_https_traffic_only = kwargs.get('enable_https_traffic_only', None) + self.network_rule_set = kwargs.get('network_rule_set', None) + self.large_file_shares_state = kwargs.get('large_file_shares_state', None) + self.routing_preference = kwargs.get('routing_preference', None) + self.allow_blob_public_access = kwargs.get('allow_blob_public_access', None) + self.minimum_tls_version = kwargs.get('minimum_tls_version', None) + self.allow_shared_key_access = kwargs.get('allow_shared_key_access', None) + self.allow_cross_tenant_replication = kwargs.get('allow_cross_tenant_replication', None) + self.default_to_o_auth_authentication = kwargs.get('default_to_o_auth_authentication', None) + self.public_network_access = kwargs.get('public_network_access', None) + self.immutable_storage_with_versioning = kwargs.get('immutable_storage_with_versioning', None) + + +class StorageQueue(Resource): + """StorageQueue. + + Variables are only populated by the server, and will be ignored when sending a request. 
+ + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :param metadata: A name-value pair that represents queue metadata. + :type metadata: dict[str, str] + :ivar approximate_message_count: Integer indicating an approximate number of messages in the + queue. This number is not lower than the actual number of messages in the queue, but could be + higher. + :vartype approximate_message_count: int + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'approximate_message_count': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'metadata': {'key': 'properties.metadata', 'type': '{str}'}, + 'approximate_message_count': {'key': 'properties.approximateMessageCount', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(StorageQueue, self).__init__(**kwargs) + self.metadata = kwargs.get('metadata', None) + self.approximate_message_count = None + + +class StorageSkuListResult(msrest.serialization.Model): + """The response from the List Storage SKUs operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: Get the list result of storage SKUs and their properties. + :vartype value: list[~azure.mgmt.storage.v2021_06_01.models.SkuInformation] + """ + + _validation = { + 'value': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[SkuInformation]'}, + } + + def __init__( + self, + **kwargs + ): + super(StorageSkuListResult, self).__init__(**kwargs) + self.value = None + + +class SystemData(msrest.serialization.Model): + """Metadata pertaining to creation and last modification of the resource. + + :param created_by: The identity that created the resource. + :type created_by: str + :param created_by_type: The type of identity that created the resource. Possible values + include: "User", "Application", "ManagedIdentity", "Key". + :type created_by_type: str or ~azure.mgmt.storage.v2021_06_01.models.CreatedByType + :param created_at: The timestamp of resource creation (UTC). + :type created_at: ~datetime.datetime + :param last_modified_by: The identity that last modified the resource. + :type last_modified_by: str + :param last_modified_by_type: The type of identity that last modified the resource. Possible + values include: "User", "Application", "ManagedIdentity", "Key". + :type last_modified_by_type: str or ~azure.mgmt.storage.v2021_06_01.models.CreatedByType + :param last_modified_at: The timestamp of resource last modification (UTC). 
+ :type last_modified_at: ~datetime.datetime + """ + + _attribute_map = { + 'created_by': {'key': 'createdBy', 'type': 'str'}, + 'created_by_type': {'key': 'createdByType', 'type': 'str'}, + 'created_at': {'key': 'createdAt', 'type': 'iso-8601'}, + 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'}, + 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'}, + 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'}, + } + + def __init__( + self, + **kwargs + ): + super(SystemData, self).__init__(**kwargs) + self.created_by = kwargs.get('created_by', None) + self.created_by_type = kwargs.get('created_by_type', None) + self.created_at = kwargs.get('created_at', None) + self.last_modified_by = kwargs.get('last_modified_by', None) + self.last_modified_by_type = kwargs.get('last_modified_by_type', None) + self.last_modified_at = kwargs.get('last_modified_at', None) + + +class Table(Resource): + """Properties of the table, including Id, resource name, resource type. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar table_name: Table name under the specified account. + :vartype table_name: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'table_name': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'properties.tableName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Table, self).__init__(**kwargs) + self.table_name = None + + +class TableServiceProperties(Resource): + """The properties of a storage account’s Table service. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :param cors: Specifies CORS rules for the Table service. You can include up to five CorsRule + elements in the request. If no CorsRule elements are included in the request body, all CORS + rules will be deleted, and CORS will be disabled for the Table service. 
+ :type cors: ~azure.mgmt.storage.v2021_06_01.models.CorsRules + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'cors': {'key': 'properties.cors', 'type': 'CorsRules'}, + } + + def __init__( + self, + **kwargs + ): + super(TableServiceProperties, self).__init__(**kwargs) + self.cors = kwargs.get('cors', None) + + +class TagFilter(msrest.serialization.Model): + """Blob index tag based filtering for blob objects. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. This is the filter tag name, it can have 1 - 128 characters. + :type name: str + :param op: Required. This is the comparison operator which is used for object comparison and + filtering. Only == (equality operator) is currently supported. + :type op: str + :param value: Required. This is the filter tag value field used for tag based filtering, it can + have 0 - 256 characters. + :type value: str + """ + + _validation = { + 'name': {'required': True, 'max_length': 128, 'min_length': 1}, + 'op': {'required': True}, + 'value': {'required': True, 'max_length': 256, 'min_length': 0}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'op': {'key': 'op', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(TagFilter, self).__init__(**kwargs) + self.name = kwargs['name'] + self.op = kwargs['op'] + self.value = kwargs['value'] + + +class TagProperty(msrest.serialization.Model): + """A tag of the LegalHold of a blob container. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar tag: The tag value. + :vartype tag: str + :ivar timestamp: Returns the date and time the tag was added. + :vartype timestamp: ~datetime.datetime + :ivar object_identifier: Returns the Object ID of the user who added the tag. + :vartype object_identifier: str + :ivar tenant_id: Returns the Tenant ID that issued the token for the user who added the tag. + :vartype tenant_id: str + :ivar upn: Returns the User Principal Name of the user who added the tag. + :vartype upn: str + """ + + _validation = { + 'tag': {'readonly': True}, + 'timestamp': {'readonly': True}, + 'object_identifier': {'readonly': True}, + 'tenant_id': {'readonly': True}, + 'upn': {'readonly': True}, + } + + _attribute_map = { + 'tag': {'key': 'tag', 'type': 'str'}, + 'timestamp': {'key': 'timestamp', 'type': 'iso-8601'}, + 'object_identifier': {'key': 'objectIdentifier', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'upn': {'key': 'upn', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(TagProperty, self).__init__(**kwargs) + self.tag = None + self.timestamp = None + self.object_identifier = None + self.tenant_id = None + self.upn = None + + +class UpdateHistoryProperty(msrest.serialization.Model): + """An update history of the ImmutabilityPolicy of a blob container. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar update: The ImmutabilityPolicy update type of a blob container, possible values include: + put, lock and extend. Possible values include: "put", "lock", "extend". 
+ :vartype update: str or ~azure.mgmt.storage.v2021_06_01.models.ImmutabilityPolicyUpdateType
+ :ivar immutability_period_since_creation_in_days: The immutability period for the blobs in the
+ container since the policy creation, in days.
+ :vartype immutability_period_since_creation_in_days: int
+ :ivar timestamp: Returns the date and time the ImmutabilityPolicy was updated.
+ :vartype timestamp: ~datetime.datetime
+ :ivar object_identifier: Returns the Object ID of the user who updated the ImmutabilityPolicy.
+ :vartype object_identifier: str
+ :ivar tenant_id: Returns the Tenant ID that issued the token for the user who updated the
+ ImmutabilityPolicy.
+ :vartype tenant_id: str
+ :ivar upn: Returns the User Principal Name of the user who updated the ImmutabilityPolicy.
+ :vartype upn: str
+ :param allow_protected_append_writes: This property can only be changed for unlocked time-based
+ retention policies. When enabled, new blocks can be written to an append blob while maintaining
+ immutability protection and compliance. Only new blocks can be added and any existing blocks
+ cannot be modified or deleted. This property cannot be changed with ExtendImmutabilityPolicy
+ API.
+ :type allow_protected_append_writes: bool
+ :param allow_protected_append_writes_all: This property can only be changed for unlocked
+ time-based retention policies. When enabled, new blocks can be written to both 'Append and Block
+ Blobs' while maintaining immutability protection and compliance. Only new blocks can be added
+ and any existing blocks cannot be modified or deleted. This property cannot be changed with
+ ExtendImmutabilityPolicy API. The 'allowProtectedAppendWrites' and
+ 'allowProtectedAppendWritesAll' properties are mutually exclusive.
+ :type allow_protected_append_writes_all: bool
+ """
+
+ _validation = {
+ 'update': {'readonly': True},
+ 'immutability_period_since_creation_in_days': {'readonly': True},
+ 'timestamp': {'readonly': True},
+ 'object_identifier': {'readonly': True},
+ 'tenant_id': {'readonly': True},
+ 'upn': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'update': {'key': 'update', 'type': 'str'},
+ 'immutability_period_since_creation_in_days': {'key': 'immutabilityPeriodSinceCreationInDays', 'type': 'int'},
+ 'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
+ 'object_identifier': {'key': 'objectIdentifier', 'type': 'str'},
+ 'tenant_id': {'key': 'tenantId', 'type': 'str'},
+ 'upn': {'key': 'upn', 'type': 'str'},
+ 'allow_protected_append_writes': {'key': 'allowProtectedAppendWrites', 'type': 'bool'},
+ 'allow_protected_append_writes_all': {'key': 'allowProtectedAppendWritesAll', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UpdateHistoryProperty, self).__init__(**kwargs)
+ self.update = None
+ self.immutability_period_since_creation_in_days = None
+ self.timestamp = None
+ self.object_identifier = None
+ self.tenant_id = None
+ self.upn = None
+ self.allow_protected_append_writes = kwargs.get('allow_protected_append_writes', None)
+ self.allow_protected_append_writes_all = kwargs.get('allow_protected_append_writes_all', None)
+
+
+class Usage(msrest.serialization.Model):
+ """Describes Storage Resource Usage.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar unit: Gets the unit of measurement. Possible values include: "Count", "Bytes", "Seconds",
+ "Percent", "CountsPerSecond", "BytesPerSecond".
+ :vartype unit: str or ~azure.mgmt.storage.v2021_06_01.models.UsageUnit + :ivar current_value: Gets the current count of the allocated resources in the subscription. + :vartype current_value: int + :ivar limit: Gets the maximum count of the resources that can be allocated in the subscription. + :vartype limit: int + :ivar name: Gets the name of the type of usage. + :vartype name: ~azure.mgmt.storage.v2021_06_01.models.UsageName + """ + + _validation = { + 'unit': {'readonly': True}, + 'current_value': {'readonly': True}, + 'limit': {'readonly': True}, + 'name': {'readonly': True}, + } + + _attribute_map = { + 'unit': {'key': 'unit', 'type': 'str'}, + 'current_value': {'key': 'currentValue', 'type': 'int'}, + 'limit': {'key': 'limit', 'type': 'int'}, + 'name': {'key': 'name', 'type': 'UsageName'}, + } + + def __init__( + self, + **kwargs + ): + super(Usage, self).__init__(**kwargs) + self.unit = None + self.current_value = None + self.limit = None + self.name = None + + +class UsageListResult(msrest.serialization.Model): + """The response from the List Usages operation. + + :param value: Gets or sets the list of Storage Resource Usages. + :type value: list[~azure.mgmt.storage.v2021_06_01.models.Usage] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Usage]'}, + } + + def __init__( + self, + **kwargs + ): + super(UsageListResult, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + + +class UsageName(msrest.serialization.Model): + """The usage names that can be used; currently limited to StorageAccount. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: Gets a string describing the resource name. + :vartype value: str + :ivar localized_value: Gets a localized string describing the resource name. + :vartype localized_value: str + """ + + _validation = { + 'value': {'readonly': True}, + 'localized_value': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': 'str'}, + 'localized_value': {'key': 'localizedValue', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(UsageName, self).__init__(**kwargs) + self.value = None + self.localized_value = None + + +class UserAssignedIdentity(msrest.serialization.Model): + """UserAssignedIdentity for the resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar principal_id: The principal ID of the identity. + :vartype principal_id: str + :ivar client_id: The client ID of the identity. + :vartype client_id: str + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'client_id': {'readonly': True}, + } + + _attribute_map = { + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'client_id': {'key': 'clientId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(UserAssignedIdentity, self).__init__(**kwargs) + self.principal_id = None + self.client_id = None + + +class VirtualNetworkRule(msrest.serialization.Model): + """Virtual Network rule. + + All required parameters must be populated in order to send to Azure. + + :param virtual_network_resource_id: Required. Resource ID of a subnet, for example: + /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}. + :type virtual_network_resource_id: str + :param action: The action of virtual network rule. The only acceptable values to pass in are + None and "Allow". The default value is None. 
+ :type action: str + :param state: Gets the state of virtual network rule. Possible values include: "Provisioning", + "Deprovisioning", "Succeeded", "Failed", "NetworkSourceDeleted". + :type state: str or ~azure.mgmt.storage.v2021_06_01.models.State + """ + + _validation = { + 'virtual_network_resource_id': {'required': True}, + } + + _attribute_map = { + 'virtual_network_resource_id': {'key': 'id', 'type': 'str'}, + 'action': {'key': 'action', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(VirtualNetworkRule, self).__init__(**kwargs) + self.virtual_network_resource_id = kwargs['virtual_network_resource_id'] + self.action = kwargs.get('action', None) + self.state = kwargs.get('state', None) diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/models/_models_py3.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/models/_models_py3.py new file mode 100644 index 000000000000..59c6c288ff9d --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/models/_models_py3.py @@ -0,0 +1,6424 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +import datetime +from typing import Dict, List, Optional, Union + +from azure.core.exceptions import HttpResponseError +import msrest.serialization + +from ._storage_management_client_enums import * + + +class AccessPolicy(msrest.serialization.Model): + """AccessPolicy. + + :param start_time: Start time of the access policy. + :type start_time: ~datetime.datetime + :param expiry_time: Expiry time of the access policy. + :type expiry_time: ~datetime.datetime + :param permission: List of abbreviated permissions. + :type permission: str + """ + + _attribute_map = { + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, + 'permission': {'key': 'permission', 'type': 'str'}, + } + + def __init__( + self, + *, + start_time: Optional[datetime.datetime] = None, + expiry_time: Optional[datetime.datetime] = None, + permission: Optional[str] = None, + **kwargs + ): + super(AccessPolicy, self).__init__(**kwargs) + self.start_time = start_time + self.expiry_time = expiry_time + self.permission = permission + + +class AccountImmutabilityPolicyProperties(msrest.serialization.Model): + """This defines account-level immutability policy properties. + + :param immutability_period_since_creation_in_days: The immutability period for the blobs in the + container since the policy creation, in days. + :type immutability_period_since_creation_in_days: int + :param state: The ImmutabilityPolicy state defines the mode of the policy. Disabled state + disables the policy, Unlocked state allows increase and decrease of immutability retention time + and also allows toggling allowProtectedAppendWrites property, Locked state only allows the + increase of the immutability retention time. A policy can only be created in a Disabled or + Unlocked state and can be toggled between the two states. Only a policy in an Unlocked state + can transition to a Locked state which cannot be reverted. 
Possible values include: "Unlocked", + "Locked", "Disabled". + :type state: str or ~azure.mgmt.storage.v2021_06_01.models.AccountImmutabilityPolicyState + :param allow_protected_append_writes: This property can only be changed for disabled and + unlocked time-based retention policies. When enabled, new blocks can be written to an append + blob while maintaining immutability protection and compliance. Only new blocks can be added and + any existing blocks cannot be modified or deleted. + :type allow_protected_append_writes: bool + """ + + _validation = { + 'immutability_period_since_creation_in_days': {'maximum': 146000, 'minimum': 1}, + } + + _attribute_map = { + 'immutability_period_since_creation_in_days': {'key': 'immutabilityPeriodSinceCreationInDays', 'type': 'int'}, + 'state': {'key': 'state', 'type': 'str'}, + 'allow_protected_append_writes': {'key': 'allowProtectedAppendWrites', 'type': 'bool'}, + } + + def __init__( + self, + *, + immutability_period_since_creation_in_days: Optional[int] = None, + state: Optional[Union[str, "AccountImmutabilityPolicyState"]] = None, + allow_protected_append_writes: Optional[bool] = None, + **kwargs + ): + super(AccountImmutabilityPolicyProperties, self).__init__(**kwargs) + self.immutability_period_since_creation_in_days = immutability_period_since_creation_in_days + self.state = state + self.allow_protected_append_writes = allow_protected_append_writes + + +class AccountSasParameters(msrest.serialization.Model): + """The parameters to list SAS credentials of a storage account. + + All required parameters must be populated in order to send to Azure. + + :param services: Required. The signed services accessible with the account SAS. Possible values + include: Blob (b), Queue (q), Table (t), File (f). Possible values include: "b", "q", "t", "f". + :type services: str or ~azure.mgmt.storage.v2021_06_01.models.Services + :param resource_types: Required. The signed resource types that are accessible with the account + SAS. Service (s): Access to service-level APIs; Container (c): Access to container-level APIs; + Object (o): Access to object-level APIs for blobs, queue messages, table entities, and files. + Possible values include: "s", "c", "o". + :type resource_types: str or ~azure.mgmt.storage.v2021_06_01.models.SignedResourceTypes + :param permissions: Required. The signed permissions for the account SAS. Possible values + include: Read (r), Write (w), Delete (d), List (l), Add (a), Create (c), Update (u) and Process + (p). Possible values include: "r", "d", "w", "l", "a", "c", "u", "p". + :type permissions: str or ~azure.mgmt.storage.v2021_06_01.models.Permissions + :param ip_address_or_range: An IP address or a range of IP addresses from which to accept + requests. + :type ip_address_or_range: str + :param protocols: The protocol permitted for a request made with the account SAS. Possible + values include: "https,http", "https". + :type protocols: str or ~azure.mgmt.storage.v2021_06_01.models.HttpProtocol + :param shared_access_start_time: The time at which the SAS becomes valid. + :type shared_access_start_time: ~datetime.datetime + :param shared_access_expiry_time: Required. The time at which the shared access signature + becomes invalid. + :type shared_access_expiry_time: ~datetime.datetime + :param key_to_sign: The key to sign the account SAS token with. 
+ :type key_to_sign: str + """ + + _validation = { + 'services': {'required': True}, + 'resource_types': {'required': True}, + 'permissions': {'required': True}, + 'shared_access_expiry_time': {'required': True}, + } + + _attribute_map = { + 'services': {'key': 'signedServices', 'type': 'str'}, + 'resource_types': {'key': 'signedResourceTypes', 'type': 'str'}, + 'permissions': {'key': 'signedPermission', 'type': 'str'}, + 'ip_address_or_range': {'key': 'signedIp', 'type': 'str'}, + 'protocols': {'key': 'signedProtocol', 'type': 'str'}, + 'shared_access_start_time': {'key': 'signedStart', 'type': 'iso-8601'}, + 'shared_access_expiry_time': {'key': 'signedExpiry', 'type': 'iso-8601'}, + 'key_to_sign': {'key': 'keyToSign', 'type': 'str'}, + } + + def __init__( + self, + *, + services: Union[str, "Services"], + resource_types: Union[str, "SignedResourceTypes"], + permissions: Union[str, "Permissions"], + shared_access_expiry_time: datetime.datetime, + ip_address_or_range: Optional[str] = None, + protocols: Optional[Union[str, "HttpProtocol"]] = None, + shared_access_start_time: Optional[datetime.datetime] = None, + key_to_sign: Optional[str] = None, + **kwargs + ): + super(AccountSasParameters, self).__init__(**kwargs) + self.services = services + self.resource_types = resource_types + self.permissions = permissions + self.ip_address_or_range = ip_address_or_range + self.protocols = protocols + self.shared_access_start_time = shared_access_start_time + self.shared_access_expiry_time = shared_access_expiry_time + self.key_to_sign = key_to_sign + + +class ActiveDirectoryProperties(msrest.serialization.Model): + """Settings properties for Active Directory (AD). + + All required parameters must be populated in order to send to Azure. + + :param domain_name: Required. Specifies the primary domain that the AD DNS server is + authoritative for. + :type domain_name: str + :param net_bios_domain_name: Required. Specifies the NetBIOS domain name. + :type net_bios_domain_name: str + :param forest_name: Required. Specifies the Active Directory forest to get. + :type forest_name: str + :param domain_guid: Required. Specifies the domain GUID. + :type domain_guid: str + :param domain_sid: Required. Specifies the security identifier (SID). + :type domain_sid: str + :param azure_storage_sid: Required. Specifies the security identifier (SID) for Azure Storage. 
+ :type azure_storage_sid: str + """ + + _validation = { + 'domain_name': {'required': True}, + 'net_bios_domain_name': {'required': True}, + 'forest_name': {'required': True}, + 'domain_guid': {'required': True}, + 'domain_sid': {'required': True}, + 'azure_storage_sid': {'required': True}, + } + + _attribute_map = { + 'domain_name': {'key': 'domainName', 'type': 'str'}, + 'net_bios_domain_name': {'key': 'netBiosDomainName', 'type': 'str'}, + 'forest_name': {'key': 'forestName', 'type': 'str'}, + 'domain_guid': {'key': 'domainGuid', 'type': 'str'}, + 'domain_sid': {'key': 'domainSid', 'type': 'str'}, + 'azure_storage_sid': {'key': 'azureStorageSid', 'type': 'str'}, + } + + def __init__( + self, + *, + domain_name: str, + net_bios_domain_name: str, + forest_name: str, + domain_guid: str, + domain_sid: str, + azure_storage_sid: str, + **kwargs + ): + super(ActiveDirectoryProperties, self).__init__(**kwargs) + self.domain_name = domain_name + self.net_bios_domain_name = net_bios_domain_name + self.forest_name = forest_name + self.domain_guid = domain_guid + self.domain_sid = domain_sid + self.azure_storage_sid = azure_storage_sid + + +class Resource(msrest.serialization.Model): + """Common fields that are returned in the response for all Azure Resource Manager resources. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Resource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + + +class AzureEntityResource(Resource): + """The resource model definition for an Azure Resource Manager resource with an etag. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureEntityResource, self).__init__(**kwargs) + self.etag = None + + +class AzureFilesIdentityBasedAuthentication(msrest.serialization.Model): + """Settings for Azure Files identity based authentication. 
+ + All required parameters must be populated in order to send to Azure. + + :param directory_service_options: Required. Indicates the directory service used. Possible + values include: "None", "AADDS", "AD". + :type directory_service_options: str or + ~azure.mgmt.storage.v2021_06_01.models.DirectoryServiceOptions + :param active_directory_properties: Required if choose AD. + :type active_directory_properties: + ~azure.mgmt.storage.v2021_06_01.models.ActiveDirectoryProperties + :param default_share_permission: Default share permission for users using Kerberos + authentication if RBAC role is not assigned. Possible values include: "None", + "StorageFileDataSmbShareReader", "StorageFileDataSmbShareContributor", + "StorageFileDataSmbShareElevatedContributor". + :type default_share_permission: str or + ~azure.mgmt.storage.v2021_06_01.models.DefaultSharePermission + """ + + _validation = { + 'directory_service_options': {'required': True}, + } + + _attribute_map = { + 'directory_service_options': {'key': 'directoryServiceOptions', 'type': 'str'}, + 'active_directory_properties': {'key': 'activeDirectoryProperties', 'type': 'ActiveDirectoryProperties'}, + 'default_share_permission': {'key': 'defaultSharePermission', 'type': 'str'}, + } + + def __init__( + self, + *, + directory_service_options: Union[str, "DirectoryServiceOptions"], + active_directory_properties: Optional["ActiveDirectoryProperties"] = None, + default_share_permission: Optional[Union[str, "DefaultSharePermission"]] = None, + **kwargs + ): + super(AzureFilesIdentityBasedAuthentication, self).__init__(**kwargs) + self.directory_service_options = directory_service_options + self.active_directory_properties = active_directory_properties + self.default_share_permission = default_share_permission + + +class BlobContainer(AzureEntityResource): + """Properties of the blob container, including Id, resource name, resource type, Etag. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :ivar version: The version of the deleted blob container. + :vartype version: str + :ivar deleted: Indicates whether the blob container was deleted. + :vartype deleted: bool + :ivar deleted_time: Blob container deletion time. + :vartype deleted_time: ~datetime.datetime + :ivar remaining_retention_days: Remaining retention days for soft deleted blob container. + :vartype remaining_retention_days: int + :param default_encryption_scope: Default the container to use specified encryption scope for + all writes. + :type default_encryption_scope: str + :param deny_encryption_scope_override: Block override of encryption scope from the container + default. + :type deny_encryption_scope_override: bool + :param public_access: Specifies whether data in the container may be accessed publicly and the + level of access. Possible values include: "Container", "Blob", "None". + :type public_access: str or ~azure.mgmt.storage.v2021_06_01.models.PublicAccess + :ivar last_modified_time: Returns the date and time the container was last modified. 
+ :vartype last_modified_time: ~datetime.datetime
+ :ivar lease_status: The lease status of the container. Possible values include: "Locked",
+ "Unlocked".
+ :vartype lease_status: str or ~azure.mgmt.storage.v2021_06_01.models.LeaseStatus
+ :ivar lease_state: Lease state of the container. Possible values include: "Available",
+ "Leased", "Expired", "Breaking", "Broken".
+ :vartype lease_state: str or ~azure.mgmt.storage.v2021_06_01.models.LeaseState
+ :ivar lease_duration: Specifies whether the lease on a container is of infinite or fixed
+ duration, only when the container is leased. Possible values include: "Infinite", "Fixed".
+ :vartype lease_duration: str or ~azure.mgmt.storage.v2021_06_01.models.LeaseDuration
+ :param metadata: A name-value pair to associate with the container as metadata.
+ :type metadata: dict[str, str]
+ :ivar immutability_policy: The ImmutabilityPolicy property of the container.
+ :vartype immutability_policy:
+ ~azure.mgmt.storage.v2021_06_01.models.ImmutabilityPolicyProperties
+ :ivar legal_hold: The LegalHold property of the container.
+ :vartype legal_hold: ~azure.mgmt.storage.v2021_06_01.models.LegalHoldProperties
+ :ivar has_legal_hold: The hasLegalHold public property is set to true by SRP if there is at
+ least one existing tag. The hasLegalHold public property is set to false by SRP if all existing
+ legal hold tags are cleared out. There can be a maximum of 1000 blob containers with
+ hasLegalHold=true for a given account.
+ :vartype has_legal_hold: bool
+ :ivar has_immutability_policy: The hasImmutabilityPolicy public property is set to true by SRP
+ if ImmutabilityPolicy has been created for this container. The hasImmutabilityPolicy public
+ property is set to false by SRP if ImmutabilityPolicy has not been created for this container.
+ :vartype has_immutability_policy: bool
+ :param immutable_storage_with_versioning: The object level immutability property of the
+ container. The property is immutable and can only be set to true at the container creation
+ time. Existing containers must undergo a migration process.
+ :type immutable_storage_with_versioning:
+ ~azure.mgmt.storage.v2021_06_01.models.ImmutableStorageWithVersioning
+ :param enable_nfs_v3_root_squash: Enable NFSv3 root squash on blob container.
+ :type enable_nfs_v3_root_squash: bool
+ :param enable_nfs_v3_all_squash: Enable NFSv3 all squash on blob container.
+ :type enable_nfs_v3_all_squash: bool + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'version': {'readonly': True}, + 'deleted': {'readonly': True}, + 'deleted_time': {'readonly': True}, + 'remaining_retention_days': {'readonly': True}, + 'last_modified_time': {'readonly': True}, + 'lease_status': {'readonly': True}, + 'lease_state': {'readonly': True}, + 'lease_duration': {'readonly': True}, + 'immutability_policy': {'readonly': True}, + 'legal_hold': {'readonly': True}, + 'has_legal_hold': {'readonly': True}, + 'has_immutability_policy': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'version': {'key': 'properties.version', 'type': 'str'}, + 'deleted': {'key': 'properties.deleted', 'type': 'bool'}, + 'deleted_time': {'key': 'properties.deletedTime', 'type': 'iso-8601'}, + 'remaining_retention_days': {'key': 'properties.remainingRetentionDays', 'type': 'int'}, + 'default_encryption_scope': {'key': 'properties.defaultEncryptionScope', 'type': 'str'}, + 'deny_encryption_scope_override': {'key': 'properties.denyEncryptionScopeOverride', 'type': 'bool'}, + 'public_access': {'key': 'properties.publicAccess', 'type': 'str'}, + 'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'iso-8601'}, + 'lease_status': {'key': 'properties.leaseStatus', 'type': 'str'}, + 'lease_state': {'key': 'properties.leaseState', 'type': 'str'}, + 'lease_duration': {'key': 'properties.leaseDuration', 'type': 'str'}, + 'metadata': {'key': 'properties.metadata', 'type': '{str}'}, + 'immutability_policy': {'key': 'properties.immutabilityPolicy', 'type': 'ImmutabilityPolicyProperties'}, + 'legal_hold': {'key': 'properties.legalHold', 'type': 'LegalHoldProperties'}, + 'has_legal_hold': {'key': 'properties.hasLegalHold', 'type': 'bool'}, + 'has_immutability_policy': {'key': 'properties.hasImmutabilityPolicy', 'type': 'bool'}, + 'immutable_storage_with_versioning': {'key': 'properties.immutableStorageWithVersioning', 'type': 'ImmutableStorageWithVersioning'}, + 'enable_nfs_v3_root_squash': {'key': 'properties.enableNfsV3RootSquash', 'type': 'bool'}, + 'enable_nfs_v3_all_squash': {'key': 'properties.enableNfsV3AllSquash', 'type': 'bool'}, + } + + def __init__( + self, + *, + default_encryption_scope: Optional[str] = None, + deny_encryption_scope_override: Optional[bool] = None, + public_access: Optional[Union[str, "PublicAccess"]] = None, + metadata: Optional[Dict[str, str]] = None, + immutable_storage_with_versioning: Optional["ImmutableStorageWithVersioning"] = None, + enable_nfs_v3_root_squash: Optional[bool] = None, + enable_nfs_v3_all_squash: Optional[bool] = None, + **kwargs + ): + super(BlobContainer, self).__init__(**kwargs) + self.version = None + self.deleted = None + self.deleted_time = None + self.remaining_retention_days = None + self.default_encryption_scope = default_encryption_scope + self.deny_encryption_scope_override = deny_encryption_scope_override + self.public_access = public_access + self.last_modified_time = None + self.lease_status = None + self.lease_state = None + self.lease_duration = None + self.metadata = metadata + self.immutability_policy = None + self.legal_hold = None + self.has_legal_hold = None + self.has_immutability_policy = None + self.immutable_storage_with_versioning = immutable_storage_with_versioning + 
self.enable_nfs_v3_root_squash = enable_nfs_v3_root_squash + self.enable_nfs_v3_all_squash = enable_nfs_v3_all_squash + + +class BlobInventoryPolicy(Resource): + """The storage account blob inventory policy. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~azure.mgmt.storage.v2021_06_01.models.SystemData + :ivar last_modified_time: Returns the last modified date and time of the blob inventory policy. + :vartype last_modified_time: ~datetime.datetime + :param policy: The storage account blob inventory policy object. It is composed of policy + rules. + :type policy: ~azure.mgmt.storage.v2021_06_01.models.BlobInventoryPolicySchema + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'last_modified_time': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'iso-8601'}, + 'policy': {'key': 'properties.policy', 'type': 'BlobInventoryPolicySchema'}, + } + + def __init__( + self, + *, + policy: Optional["BlobInventoryPolicySchema"] = None, + **kwargs + ): + super(BlobInventoryPolicy, self).__init__(**kwargs) + self.system_data = None + self.last_modified_time = None + self.policy = policy + + +class BlobInventoryPolicyDefinition(msrest.serialization.Model): + """An object that defines the blob inventory rule. + + All required parameters must be populated in order to send to Azure. + + :param filters: An object that defines the filter set. + :type filters: ~azure.mgmt.storage.v2021_06_01.models.BlobInventoryPolicyFilter + :param format: Required. This is a required field, it specifies the format for the inventory + files. Possible values include: "Csv", "Parquet". + :type format: str or ~azure.mgmt.storage.v2021_06_01.models.Format + :param schedule: Required. This is a required field. This field is used to schedule an + inventory formation. Possible values include: "Daily", "Weekly". + :type schedule: str or ~azure.mgmt.storage.v2021_06_01.models.Schedule + :param object_type: Required. This is a required field. This field specifies the scope of the + inventory created either at the blob or container level. Possible values include: "Blob", + "Container". + :type object_type: str or ~azure.mgmt.storage.v2021_06_01.models.ObjectType + :param schema_fields: Required. This is a required field. This field specifies the fields and + properties of the object to be included in the inventory. The Schema field value 'Name' is + always required. 
The valid values for this field for the 'Blob' definition.objectType include
+ 'Name, Creation-Time, Last-Modified, Content-Length, Content-MD5, BlobType, AccessTier,
+ AccessTierChangeTime, AccessTierInferred, Tags, Expiry-Time, hdi_isfolder, Owner, Group,
+ Permissions, Acl, Snapshot, VersionId, IsCurrentVersion, Metadata, LastAccessTime'. The valid
+ values for 'Container' definition.objectType include 'Name, Last-Modified, Metadata,
+ LeaseStatus, LeaseState, LeaseDuration, PublicAccess, HasImmutabilityPolicy, HasLegalHold'.
+ Schema field values 'Expiry-Time, hdi_isfolder, Owner, Group, Permissions, Acl' are valid only
+ for Hns enabled accounts. The 'Tags' field is only valid for non Hns accounts.
+ :type schema_fields: list[str]
+ """
+
+ _validation = {
+ 'format': {'required': True},
+ 'schedule': {'required': True},
+ 'object_type': {'required': True},
+ 'schema_fields': {'required': True},
+ }
+
+ _attribute_map = {
+ 'filters': {'key': 'filters', 'type': 'BlobInventoryPolicyFilter'},
+ 'format': {'key': 'format', 'type': 'str'},
+ 'schedule': {'key': 'schedule', 'type': 'str'},
+ 'object_type': {'key': 'objectType', 'type': 'str'},
+ 'schema_fields': {'key': 'schemaFields', 'type': '[str]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ format: Union[str, "Format"],
+ schedule: Union[str, "Schedule"],
+ object_type: Union[str, "ObjectType"],
+ schema_fields: List[str],
+ filters: Optional["BlobInventoryPolicyFilter"] = None,
+ **kwargs
+ ):
+ super(BlobInventoryPolicyDefinition, self).__init__(**kwargs)
+ self.filters = filters
+ self.format = format
+ self.schedule = schedule
+ self.object_type = object_type
+ self.schema_fields = schema_fields
+
+
+class BlobInventoryPolicyFilter(msrest.serialization.Model):
+ """An object that defines the blob inventory rule filter conditions. For 'Blob' definition.objectType all filter properties are applicable, 'blobTypes' is required and others are optional. For 'Container' definition.objectType only prefixMatch is applicable and is optional.
+
+ :param prefix_match: An array of strings for blob prefixes to be matched.
+ :type prefix_match: list[str]
+ :param blob_types: An array of predefined enum values. Valid values include blockBlob,
+ appendBlob, pageBlob. Hns accounts do not support pageBlobs. This field is required when
+ definition.objectType property is set to 'Blob'.
+ :type blob_types: list[str]
+ :param include_blob_versions: Includes blob versions in blob inventory when value is set to
+ true. The definition.schemaFields values 'VersionId and IsCurrentVersion' are required if this
+ property is set to true, else they must be excluded.
+ :type include_blob_versions: bool
+ :param include_snapshots: Includes blob snapshots in blob inventory when value is set to true.
+ The definition.schemaFields value 'Snapshot' is required if this property is set to true, else
+ it must be excluded.
+ :type include_snapshots: bool + """ + + _attribute_map = { + 'prefix_match': {'key': 'prefixMatch', 'type': '[str]'}, + 'blob_types': {'key': 'blobTypes', 'type': '[str]'}, + 'include_blob_versions': {'key': 'includeBlobVersions', 'type': 'bool'}, + 'include_snapshots': {'key': 'includeSnapshots', 'type': 'bool'}, + } + + def __init__( + self, + *, + prefix_match: Optional[List[str]] = None, + blob_types: Optional[List[str]] = None, + include_blob_versions: Optional[bool] = None, + include_snapshots: Optional[bool] = None, + **kwargs + ): + super(BlobInventoryPolicyFilter, self).__init__(**kwargs) + self.prefix_match = prefix_match + self.blob_types = blob_types + self.include_blob_versions = include_blob_versions + self.include_snapshots = include_snapshots + + +class BlobInventoryPolicyRule(msrest.serialization.Model): + """An object that wraps the blob inventory rule. Each rule is uniquely defined by name. + + All required parameters must be populated in order to send to Azure. + + :param enabled: Required. Rule is enabled when set to true. + :type enabled: bool + :param name: Required. A rule name can contain any combination of alpha numeric characters. + Rule name is case-sensitive. It must be unique within a policy. + :type name: str + :param destination: Required. Container name where blob inventory files are stored. Must be + pre-created. + :type destination: str + :param definition: Required. An object that defines the blob inventory policy rule. + :type definition: ~azure.mgmt.storage.v2021_06_01.models.BlobInventoryPolicyDefinition + """ + + _validation = { + 'enabled': {'required': True}, + 'name': {'required': True}, + 'destination': {'required': True}, + 'definition': {'required': True}, + } + + _attribute_map = { + 'enabled': {'key': 'enabled', 'type': 'bool'}, + 'name': {'key': 'name', 'type': 'str'}, + 'destination': {'key': 'destination', 'type': 'str'}, + 'definition': {'key': 'definition', 'type': 'BlobInventoryPolicyDefinition'}, + } + + def __init__( + self, + *, + enabled: bool, + name: str, + destination: str, + definition: "BlobInventoryPolicyDefinition", + **kwargs + ): + super(BlobInventoryPolicyRule, self).__init__(**kwargs) + self.enabled = enabled + self.name = name + self.destination = destination + self.definition = definition + + +class BlobInventoryPolicySchema(msrest.serialization.Model): + """The storage account blob inventory policy rules. + + All required parameters must be populated in order to send to Azure. + + :param enabled: Required. Policy is enabled if set to true. + :type enabled: bool + :param type: Required. The valid value is Inventory. Possible values include: "Inventory". + :type type: str or ~azure.mgmt.storage.v2021_06_01.models.InventoryRuleType + :param rules: Required. The storage account blob inventory policy rules. The rule is applied + when it is enabled. 
+ :type rules: list[~azure.mgmt.storage.v2021_06_01.models.BlobInventoryPolicyRule] + """ + + _validation = { + 'enabled': {'required': True}, + 'type': {'required': True}, + 'rules': {'required': True}, + } + + _attribute_map = { + 'enabled': {'key': 'enabled', 'type': 'bool'}, + 'type': {'key': 'type', 'type': 'str'}, + 'rules': {'key': 'rules', 'type': '[BlobInventoryPolicyRule]'}, + } + + def __init__( + self, + *, + enabled: bool, + type: Union[str, "InventoryRuleType"], + rules: List["BlobInventoryPolicyRule"], + **kwargs + ): + super(BlobInventoryPolicySchema, self).__init__(**kwargs) + self.enabled = enabled + self.type = type + self.rules = rules + + +class BlobRestoreParameters(msrest.serialization.Model): + """Blob restore parameters. + + All required parameters must be populated in order to send to Azure. + + :param time_to_restore: Required. Restore blob to the specified time. + :type time_to_restore: ~datetime.datetime + :param blob_ranges: Required. Blob ranges to restore. + :type blob_ranges: list[~azure.mgmt.storage.v2021_06_01.models.BlobRestoreRange] + """ + + _validation = { + 'time_to_restore': {'required': True}, + 'blob_ranges': {'required': True}, + } + + _attribute_map = { + 'time_to_restore': {'key': 'timeToRestore', 'type': 'iso-8601'}, + 'blob_ranges': {'key': 'blobRanges', 'type': '[BlobRestoreRange]'}, + } + + def __init__( + self, + *, + time_to_restore: datetime.datetime, + blob_ranges: List["BlobRestoreRange"], + **kwargs + ): + super(BlobRestoreParameters, self).__init__(**kwargs) + self.time_to_restore = time_to_restore + self.blob_ranges = blob_ranges + + +class BlobRestoreRange(msrest.serialization.Model): + """Blob range. + + All required parameters must be populated in order to send to Azure. + + :param start_range: Required. Blob start range. This is inclusive. Empty means account start. + :type start_range: str + :param end_range: Required. Blob end range. This is exclusive. Empty means account end. + :type end_range: str + """ + + _validation = { + 'start_range': {'required': True}, + 'end_range': {'required': True}, + } + + _attribute_map = { + 'start_range': {'key': 'startRange', 'type': 'str'}, + 'end_range': {'key': 'endRange', 'type': 'str'}, + } + + def __init__( + self, + *, + start_range: str, + end_range: str, + **kwargs + ): + super(BlobRestoreRange, self).__init__(**kwargs) + self.start_range = start_range + self.end_range = end_range + + +class BlobRestoreStatus(msrest.serialization.Model): + """Blob restore status. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar status: The status of blob restore progress. Possible values are: - InProgress: Indicates + that blob restore is ongoing. - Complete: Indicates that blob restore has been completed + successfully. - Failed: Indicates that blob restore is failed. Possible values include: + "InProgress", "Complete", "Failed". + :vartype status: str or ~azure.mgmt.storage.v2021_06_01.models.BlobRestoreProgressStatus + :ivar failure_reason: Failure reason when blob restore is failed. + :vartype failure_reason: str + :ivar restore_id: Id for tracking blob restore request. + :vartype restore_id: str + :ivar parameters: Blob restore request parameters. 
+ :vartype parameters: ~azure.mgmt.storage.v2021_06_01.models.BlobRestoreParameters + """ + + _validation = { + 'status': {'readonly': True}, + 'failure_reason': {'readonly': True}, + 'restore_id': {'readonly': True}, + 'parameters': {'readonly': True}, + } + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'failure_reason': {'key': 'failureReason', 'type': 'str'}, + 'restore_id': {'key': 'restoreId', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': 'BlobRestoreParameters'}, + } + + def __init__( + self, + **kwargs + ): + super(BlobRestoreStatus, self).__init__(**kwargs) + self.status = None + self.failure_reason = None + self.restore_id = None + self.parameters = None + + +class BlobServiceItems(msrest.serialization.Model): + """BlobServiceItems. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: List of blob services returned. + :vartype value: list[~azure.mgmt.storage.v2021_06_01.models.BlobServiceProperties] + """ + + _validation = { + 'value': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[BlobServiceProperties]'}, + } + + def __init__( + self, + **kwargs + ): + super(BlobServiceItems, self).__init__(**kwargs) + self.value = None + + +class BlobServiceProperties(Resource): + """The properties of a storage account’s Blob service. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar sku: Sku name and tier. + :vartype sku: ~azure.mgmt.storage.v2021_06_01.models.Sku + :param cors: Specifies CORS rules for the Blob service. You can include up to five CorsRule + elements in the request. If no CorsRule elements are included in the request body, all CORS + rules will be deleted, and CORS will be disabled for the Blob service. + :type cors: ~azure.mgmt.storage.v2021_06_01.models.CorsRules + :param default_service_version: DefaultServiceVersion indicates the default version to use for + requests to the Blob service if an incoming request’s version is not specified. Possible values + include version 2008-10-27 and all more recent versions. + :type default_service_version: str + :param delete_retention_policy: The blob service properties for blob soft delete. + :type delete_retention_policy: ~azure.mgmt.storage.v2021_06_01.models.DeleteRetentionPolicy + :param is_versioning_enabled: Versioning is enabled if set to true. + :type is_versioning_enabled: bool + :param automatic_snapshot_policy_enabled: Deprecated in favor of isVersioningEnabled property. + :type automatic_snapshot_policy_enabled: bool + :param change_feed: The blob service properties for change feed events. + :type change_feed: ~azure.mgmt.storage.v2021_06_01.models.ChangeFeed + :param restore_policy: The blob service properties for blob restore policy. + :type restore_policy: ~azure.mgmt.storage.v2021_06_01.models.RestorePolicyProperties + :param container_delete_retention_policy: The blob service properties for container soft + delete. 
+ :type container_delete_retention_policy: + ~azure.mgmt.storage.v2021_06_01.models.DeleteRetentionPolicy + :param last_access_time_tracking_policy: The blob service property to configure last access + time based tracking policy. + :type last_access_time_tracking_policy: + ~azure.mgmt.storage.v2021_06_01.models.LastAccessTimeTrackingPolicy + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'sku': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'cors': {'key': 'properties.cors', 'type': 'CorsRules'}, + 'default_service_version': {'key': 'properties.defaultServiceVersion', 'type': 'str'}, + 'delete_retention_policy': {'key': 'properties.deleteRetentionPolicy', 'type': 'DeleteRetentionPolicy'}, + 'is_versioning_enabled': {'key': 'properties.isVersioningEnabled', 'type': 'bool'}, + 'automatic_snapshot_policy_enabled': {'key': 'properties.automaticSnapshotPolicyEnabled', 'type': 'bool'}, + 'change_feed': {'key': 'properties.changeFeed', 'type': 'ChangeFeed'}, + 'restore_policy': {'key': 'properties.restorePolicy', 'type': 'RestorePolicyProperties'}, + 'container_delete_retention_policy': {'key': 'properties.containerDeleteRetentionPolicy', 'type': 'DeleteRetentionPolicy'}, + 'last_access_time_tracking_policy': {'key': 'properties.lastAccessTimeTrackingPolicy', 'type': 'LastAccessTimeTrackingPolicy'}, + } + + def __init__( + self, + *, + cors: Optional["CorsRules"] = None, + default_service_version: Optional[str] = None, + delete_retention_policy: Optional["DeleteRetentionPolicy"] = None, + is_versioning_enabled: Optional[bool] = None, + automatic_snapshot_policy_enabled: Optional[bool] = None, + change_feed: Optional["ChangeFeed"] = None, + restore_policy: Optional["RestorePolicyProperties"] = None, + container_delete_retention_policy: Optional["DeleteRetentionPolicy"] = None, + last_access_time_tracking_policy: Optional["LastAccessTimeTrackingPolicy"] = None, + **kwargs + ): + super(BlobServiceProperties, self).__init__(**kwargs) + self.sku = None + self.cors = cors + self.default_service_version = default_service_version + self.delete_retention_policy = delete_retention_policy + self.is_versioning_enabled = is_versioning_enabled + self.automatic_snapshot_policy_enabled = automatic_snapshot_policy_enabled + self.change_feed = change_feed + self.restore_policy = restore_policy + self.container_delete_retention_policy = container_delete_retention_policy + self.last_access_time_tracking_policy = last_access_time_tracking_policy + + +class ChangeFeed(msrest.serialization.Model): + """The blob service properties for change feed events. + + :param enabled: Indicates whether change feed event logging is enabled for the Blob service. + :type enabled: bool + :param retention_in_days: Indicates the duration of changeFeed retention in days. Minimum value + is 1 day and maximum value is 146000 days (400 years). A null value indicates an infinite + retention of the change feed. 
+ :type retention_in_days: int + """ + + _validation = { + 'retention_in_days': {'maximum': 146000, 'minimum': 1}, + } + + _attribute_map = { + 'enabled': {'key': 'enabled', 'type': 'bool'}, + 'retention_in_days': {'key': 'retentionInDays', 'type': 'int'}, + } + + def __init__( + self, + *, + enabled: Optional[bool] = None, + retention_in_days: Optional[int] = None, + **kwargs + ): + super(ChangeFeed, self).__init__(**kwargs) + self.enabled = enabled + self.retention_in_days = retention_in_days + + +class CheckNameAvailabilityResult(msrest.serialization.Model): + """The CheckNameAvailability operation response. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name_available: Gets a boolean value that indicates whether the name is available for you + to use. If true, the name is available. If false, the name has already been taken or is invalid + and cannot be used. + :vartype name_available: bool + :ivar reason: Gets the reason that a storage account name could not be used. The Reason element + is only returned if NameAvailable is false. Possible values include: "AccountNameInvalid", + "AlreadyExists". + :vartype reason: str or ~azure.mgmt.storage.v2021_06_01.models.Reason + :ivar message: Gets an error message explaining the Reason value in more detail. + :vartype message: str + """ + + _validation = { + 'name_available': {'readonly': True}, + 'reason': {'readonly': True}, + 'message': {'readonly': True}, + } + + _attribute_map = { + 'name_available': {'key': 'nameAvailable', 'type': 'bool'}, + 'reason': {'key': 'reason', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(CheckNameAvailabilityResult, self).__init__(**kwargs) + self.name_available = None + self.reason = None + self.message = None + + +class CloudErrorAutoGenerated(msrest.serialization.Model): + """An error response from the Storage service. + + :param error: An error response from the Storage service. + :type error: ~azure.mgmt.storage.v2021_06_01.models.CloudErrorBodyAutoGenerated + """ + + _attribute_map = { + 'error': {'key': 'error', 'type': 'CloudErrorBodyAutoGenerated'}, + } + + def __init__( + self, + *, + error: Optional["CloudErrorBodyAutoGenerated"] = None, + **kwargs + ): + super(CloudErrorAutoGenerated, self).__init__(**kwargs) + self.error = error + + +class CloudErrorBody(msrest.serialization.Model): + """An error response from the Storage service. + + :param code: An identifier for the error. Codes are invariant and are intended to be consumed + programmatically. + :type code: str + :param message: A message describing the error, intended to be suitable for display in a user + interface. + :type message: str + :param target: The target of the particular error. For example, the name of the property in + error. + :type target: str + :param details: A list of additional details about the error. 
+ :type details: list[~azure.mgmt.storage.v2021_06_01.models.CloudErrorBody] + """ + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[CloudErrorBody]'}, + } + + def __init__( + self, + *, + code: Optional[str] = None, + message: Optional[str] = None, + target: Optional[str] = None, + details: Optional[List["CloudErrorBody"]] = None, + **kwargs + ): + super(CloudErrorBody, self).__init__(**kwargs) + self.code = code + self.message = message + self.target = target + self.details = details + + +class CloudErrorBodyAutoGenerated(msrest.serialization.Model): + """An error response from the Storage service. + + :param code: An identifier for the error. Codes are invariant and are intended to be consumed + programmatically. + :type code: str + :param message: A message describing the error, intended to be suitable for display in a user + interface. + :type message: str + :param target: The target of the particular error. For example, the name of the property in + error. + :type target: str + :param details: A list of additional details about the error. + :type details: list[~azure.mgmt.storage.v2021_06_01.models.CloudErrorBodyAutoGenerated] + """ + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[CloudErrorBodyAutoGenerated]'}, + } + + def __init__( + self, + *, + code: Optional[str] = None, + message: Optional[str] = None, + target: Optional[str] = None, + details: Optional[List["CloudErrorBodyAutoGenerated"]] = None, + **kwargs + ): + super(CloudErrorBodyAutoGenerated, self).__init__(**kwargs) + self.code = code + self.message = message + self.target = target + self.details = details + + +class CorsRule(msrest.serialization.Model): + """Specifies a CORS rule for the Blob service. + + All required parameters must be populated in order to send to Azure. + + :param allowed_origins: Required. Required if CorsRule element is present. A list of origin + domains that will be allowed via CORS, or "*" to allow all domains. + :type allowed_origins: list[str] + :param allowed_methods: Required. Required if CorsRule element is present. A list of HTTP + methods that are allowed to be executed by the origin. + :type allowed_methods: list[str or + ~azure.mgmt.storage.v2021_06_01.models.CorsRuleAllowedMethodsItem] + :param max_age_in_seconds: Required. Required if CorsRule element is present. The number of + seconds that the client/browser should cache a preflight response. + :type max_age_in_seconds: int + :param exposed_headers: Required. Required if CorsRule element is present. A list of response + headers to expose to CORS clients. + :type exposed_headers: list[str] + :param allowed_headers: Required. Required if CorsRule element is present. A list of headers + allowed to be part of the cross-origin request. 
+ :type allowed_headers: list[str] + """ + + _validation = { + 'allowed_origins': {'required': True}, + 'allowed_methods': {'required': True}, + 'max_age_in_seconds': {'required': True}, + 'exposed_headers': {'required': True}, + 'allowed_headers': {'required': True}, + } + + _attribute_map = { + 'allowed_origins': {'key': 'allowedOrigins', 'type': '[str]'}, + 'allowed_methods': {'key': 'allowedMethods', 'type': '[str]'}, + 'max_age_in_seconds': {'key': 'maxAgeInSeconds', 'type': 'int'}, + 'exposed_headers': {'key': 'exposedHeaders', 'type': '[str]'}, + 'allowed_headers': {'key': 'allowedHeaders', 'type': '[str]'}, + } + + def __init__( + self, + *, + allowed_origins: List[str], + allowed_methods: List[Union[str, "CorsRuleAllowedMethodsItem"]], + max_age_in_seconds: int, + exposed_headers: List[str], + allowed_headers: List[str], + **kwargs + ): + super(CorsRule, self).__init__(**kwargs) + self.allowed_origins = allowed_origins + self.allowed_methods = allowed_methods + self.max_age_in_seconds = max_age_in_seconds + self.exposed_headers = exposed_headers + self.allowed_headers = allowed_headers + + +class CorsRules(msrest.serialization.Model): + """Sets the CORS rules. You can include up to five CorsRule elements in the request. + + :param cors_rules: The List of CORS rules. You can include up to five CorsRule elements in the + request. + :type cors_rules: list[~azure.mgmt.storage.v2021_06_01.models.CorsRule] + """ + + _attribute_map = { + 'cors_rules': {'key': 'corsRules', 'type': '[CorsRule]'}, + } + + def __init__( + self, + *, + cors_rules: Optional[List["CorsRule"]] = None, + **kwargs + ): + super(CorsRules, self).__init__(**kwargs) + self.cors_rules = cors_rules + + +class CustomDomain(msrest.serialization.Model): + """The custom domain assigned to this storage account. This can be set via Update. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. Gets or sets the custom domain name assigned to the storage account. + Name is the CNAME source. + :type name: str + :param use_sub_domain_name: Indicates whether indirect CName validation is enabled. Default + value is false. This should only be set on updates. + :type use_sub_domain_name: bool + """ + + _validation = { + 'name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'use_sub_domain_name': {'key': 'useSubDomainName', 'type': 'bool'}, + } + + def __init__( + self, + *, + name: str, + use_sub_domain_name: Optional[bool] = None, + **kwargs + ): + super(CustomDomain, self).__init__(**kwargs) + self.name = name + self.use_sub_domain_name = use_sub_domain_name + + +class DateAfterCreation(msrest.serialization.Model): + """Object to define the number of days after creation. + + All required parameters must be populated in order to send to Azure. + + :param days_after_creation_greater_than: Required. Value indicating the age in days after + creation. 
+ :type days_after_creation_greater_than: float + """ + + _validation = { + 'days_after_creation_greater_than': {'required': True, 'minimum': 0, 'multiple': 1}, + } + + _attribute_map = { + 'days_after_creation_greater_than': {'key': 'daysAfterCreationGreaterThan', 'type': 'float'}, + } + + def __init__( + self, + *, + days_after_creation_greater_than: float, + **kwargs + ): + super(DateAfterCreation, self).__init__(**kwargs) + self.days_after_creation_greater_than = days_after_creation_greater_than + + +class DateAfterModification(msrest.serialization.Model): + """Object to define the number of days after object last modification Or last access. Properties daysAfterModificationGreaterThan and daysAfterLastAccessTimeGreaterThan are mutually exclusive. + + :param days_after_modification_greater_than: Value indicating the age in days after last + modification. + :type days_after_modification_greater_than: float + :param days_after_last_access_time_greater_than: Value indicating the age in days after last + blob access. This property can only be used in conjunction with last access time tracking + policy. + :type days_after_last_access_time_greater_than: float + """ + + _validation = { + 'days_after_modification_greater_than': {'minimum': 0, 'multiple': 1}, + 'days_after_last_access_time_greater_than': {'minimum': 0, 'multiple': 1}, + } + + _attribute_map = { + 'days_after_modification_greater_than': {'key': 'daysAfterModificationGreaterThan', 'type': 'float'}, + 'days_after_last_access_time_greater_than': {'key': 'daysAfterLastAccessTimeGreaterThan', 'type': 'float'}, + } + + def __init__( + self, + *, + days_after_modification_greater_than: Optional[float] = None, + days_after_last_access_time_greater_than: Optional[float] = None, + **kwargs + ): + super(DateAfterModification, self).__init__(**kwargs) + self.days_after_modification_greater_than = days_after_modification_greater_than + self.days_after_last_access_time_greater_than = days_after_last_access_time_greater_than + + +class ProxyResource(Resource): + """The resource model definition for a Azure Resource Manager proxy resource. It will not have tags and a location. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ProxyResource, self).__init__(**kwargs) + + +class DeletedAccount(ProxyResource): + """Deleted storage account. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. 
"Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar storage_account_resource_id: Full resource id of the original storage account. + :vartype storage_account_resource_id: str + :ivar location: Location of the deleted account. + :vartype location: str + :ivar restore_reference: Can be used to attempt recovering this deleted account via + PutStorageAccount API. + :vartype restore_reference: str + :ivar creation_time: Creation time of the deleted account. + :vartype creation_time: str + :ivar deletion_time: Deletion time of the deleted account. + :vartype deletion_time: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'storage_account_resource_id': {'readonly': True}, + 'location': {'readonly': True}, + 'restore_reference': {'readonly': True}, + 'creation_time': {'readonly': True}, + 'deletion_time': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'storage_account_resource_id': {'key': 'properties.storageAccountResourceId', 'type': 'str'}, + 'location': {'key': 'properties.location', 'type': 'str'}, + 'restore_reference': {'key': 'properties.restoreReference', 'type': 'str'}, + 'creation_time': {'key': 'properties.creationTime', 'type': 'str'}, + 'deletion_time': {'key': 'properties.deletionTime', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DeletedAccount, self).__init__(**kwargs) + self.storage_account_resource_id = None + self.location = None + self.restore_reference = None + self.creation_time = None + self.deletion_time = None + + +class DeletedAccountListResult(msrest.serialization.Model): + """The response from the List Deleted Accounts operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: Gets the list of deleted accounts and their properties. + :vartype value: list[~azure.mgmt.storage.v2021_06_01.models.DeletedAccount] + :ivar next_link: Request URL that can be used to query next page of deleted accounts. Returned + when total number of requested deleted accounts exceed maximum page size. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[DeletedAccount]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DeletedAccountListResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class DeletedShare(msrest.serialization.Model): + """The deleted share to be restored. + + All required parameters must be populated in order to send to Azure. + + :param deleted_share_name: Required. Required. Identify the name of the deleted share that will + be restored. + :type deleted_share_name: str + :param deleted_share_version: Required. Required. Identify the version of the deleted share + that will be restored. 
+ :type deleted_share_version: str + """ + + _validation = { + 'deleted_share_name': {'required': True}, + 'deleted_share_version': {'required': True}, + } + + _attribute_map = { + 'deleted_share_name': {'key': 'deletedShareName', 'type': 'str'}, + 'deleted_share_version': {'key': 'deletedShareVersion', 'type': 'str'}, + } + + def __init__( + self, + *, + deleted_share_name: str, + deleted_share_version: str, + **kwargs + ): + super(DeletedShare, self).__init__(**kwargs) + self.deleted_share_name = deleted_share_name + self.deleted_share_version = deleted_share_version + + +class DeleteRetentionPolicy(msrest.serialization.Model): + """The service properties for soft delete. + + :param enabled: Indicates whether DeleteRetentionPolicy is enabled. + :type enabled: bool + :param days: Indicates the number of days that the deleted item should be retained. The minimum + specified value can be 1 and the maximum value can be 365. + :type days: int + """ + + _validation = { + 'days': {'maximum': 365, 'minimum': 1}, + } + + _attribute_map = { + 'enabled': {'key': 'enabled', 'type': 'bool'}, + 'days': {'key': 'days', 'type': 'int'}, + } + + def __init__( + self, + *, + enabled: Optional[bool] = None, + days: Optional[int] = None, + **kwargs + ): + super(DeleteRetentionPolicy, self).__init__(**kwargs) + self.enabled = enabled + self.days = days + + +class Dimension(msrest.serialization.Model): + """Dimension of blobs, possibly be blob type or access tier. + + :param name: Display name of dimension. + :type name: str + :param display_name: Display name of dimension. + :type display_name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + display_name: Optional[str] = None, + **kwargs + ): + super(Dimension, self).__init__(**kwargs) + self.name = name + self.display_name = display_name + + +class Encryption(msrest.serialization.Model): + """The encryption settings on the storage account. + + All required parameters must be populated in order to send to Azure. + + :param services: List of services which support encryption. + :type services: ~azure.mgmt.storage.v2021_06_01.models.EncryptionServices + :param key_source: Required. The encryption keySource (provider). Possible values + (case-insensitive): Microsoft.Storage, Microsoft.Keyvault. Possible values include: + "Microsoft.Storage", "Microsoft.Keyvault". Default value: "Microsoft.Storage". + :type key_source: str or ~azure.mgmt.storage.v2021_06_01.models.KeySource + :param require_infrastructure_encryption: A boolean indicating whether or not the service + applies a secondary layer of encryption with platform managed keys for data at rest. + :type require_infrastructure_encryption: bool + :param key_vault_properties: Properties provided by key vault. + :type key_vault_properties: ~azure.mgmt.storage.v2021_06_01.models.KeyVaultProperties + :param encryption_identity: The identity to be used with service-side encryption at rest. 
+ :type encryption_identity: ~azure.mgmt.storage.v2021_06_01.models.EncryptionIdentity + """ + + _validation = { + 'key_source': {'required': True}, + } + + _attribute_map = { + 'services': {'key': 'services', 'type': 'EncryptionServices'}, + 'key_source': {'key': 'keySource', 'type': 'str'}, + 'require_infrastructure_encryption': {'key': 'requireInfrastructureEncryption', 'type': 'bool'}, + 'key_vault_properties': {'key': 'keyvaultproperties', 'type': 'KeyVaultProperties'}, + 'encryption_identity': {'key': 'identity', 'type': 'EncryptionIdentity'}, + } + + def __init__( + self, + *, + key_source: Union[str, "KeySource"] = "Microsoft.Storage", + services: Optional["EncryptionServices"] = None, + require_infrastructure_encryption: Optional[bool] = None, + key_vault_properties: Optional["KeyVaultProperties"] = None, + encryption_identity: Optional["EncryptionIdentity"] = None, + **kwargs + ): + super(Encryption, self).__init__(**kwargs) + self.services = services + self.key_source = key_source + self.require_infrastructure_encryption = require_infrastructure_encryption + self.key_vault_properties = key_vault_properties + self.encryption_identity = encryption_identity + + +class EncryptionIdentity(msrest.serialization.Model): + """Encryption identity for the storage account. + + :param encryption_user_assigned_identity: Resource identifier of the UserAssigned identity to + be associated with server-side encryption on the storage account. + :type encryption_user_assigned_identity: str + """ + + _attribute_map = { + 'encryption_user_assigned_identity': {'key': 'userAssignedIdentity', 'type': 'str'}, + } + + def __init__( + self, + *, + encryption_user_assigned_identity: Optional[str] = None, + **kwargs + ): + super(EncryptionIdentity, self).__init__(**kwargs) + self.encryption_user_assigned_identity = encryption_user_assigned_identity + + +class EncryptionScope(Resource): + """The Encryption Scope resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :param source: The provider for the encryption scope. Possible values (case-insensitive): + Microsoft.Storage, Microsoft.KeyVault. Possible values include: "Microsoft.Storage", + "Microsoft.KeyVault". + :type source: str or ~azure.mgmt.storage.v2021_06_01.models.EncryptionScopeSource + :param state: The state of the encryption scope. Possible values (case-insensitive): Enabled, + Disabled. Possible values include: "Enabled", "Disabled". + :type state: str or ~azure.mgmt.storage.v2021_06_01.models.EncryptionScopeState + :ivar creation_time: Gets the creation date and time of the encryption scope in UTC. + :vartype creation_time: ~datetime.datetime + :ivar last_modified_time: Gets the last modification date and time of the encryption scope in + UTC. + :vartype last_modified_time: ~datetime.datetime + :param key_vault_properties: The key vault properties for the encryption scope. This is a + required field if encryption scope 'source' attribute is set to 'Microsoft.KeyVault'. 
+ :type key_vault_properties: + ~azure.mgmt.storage.v2021_06_01.models.EncryptionScopeKeyVaultProperties + :param require_infrastructure_encryption: A boolean indicating whether or not the service + applies a secondary layer of encryption with platform managed keys for data at rest. + :type require_infrastructure_encryption: bool + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'creation_time': {'readonly': True}, + 'last_modified_time': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source': {'key': 'properties.source', 'type': 'str'}, + 'state': {'key': 'properties.state', 'type': 'str'}, + 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'}, + 'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'iso-8601'}, + 'key_vault_properties': {'key': 'properties.keyVaultProperties', 'type': 'EncryptionScopeKeyVaultProperties'}, + 'require_infrastructure_encryption': {'key': 'properties.requireInfrastructureEncryption', 'type': 'bool'}, + } + + def __init__( + self, + *, + source: Optional[Union[str, "EncryptionScopeSource"]] = None, + state: Optional[Union[str, "EncryptionScopeState"]] = None, + key_vault_properties: Optional["EncryptionScopeKeyVaultProperties"] = None, + require_infrastructure_encryption: Optional[bool] = None, + **kwargs + ): + super(EncryptionScope, self).__init__(**kwargs) + self.source = source + self.state = state + self.creation_time = None + self.last_modified_time = None + self.key_vault_properties = key_vault_properties + self.require_infrastructure_encryption = require_infrastructure_encryption + + +class EncryptionScopeKeyVaultProperties(msrest.serialization.Model): + """The key vault properties for the encryption scope. This is a required field if encryption scope 'source' attribute is set to 'Microsoft.KeyVault'. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param key_uri: The object identifier for a key vault key object. When applied, the encryption + scope will use the key referenced by the identifier to enable customer-managed key support on + this encryption scope. + :type key_uri: str + :ivar current_versioned_key_identifier: The object identifier of the current versioned Key + Vault Key in use. + :vartype current_versioned_key_identifier: str + :ivar last_key_rotation_timestamp: Timestamp of last rotation of the Key Vault Key. + :vartype last_key_rotation_timestamp: ~datetime.datetime + """ + + _validation = { + 'current_versioned_key_identifier': {'readonly': True}, + 'last_key_rotation_timestamp': {'readonly': True}, + } + + _attribute_map = { + 'key_uri': {'key': 'keyUri', 'type': 'str'}, + 'current_versioned_key_identifier': {'key': 'currentVersionedKeyIdentifier', 'type': 'str'}, + 'last_key_rotation_timestamp': {'key': 'lastKeyRotationTimestamp', 'type': 'iso-8601'}, + } + + def __init__( + self, + *, + key_uri: Optional[str] = None, + **kwargs + ): + super(EncryptionScopeKeyVaultProperties, self).__init__(**kwargs) + self.key_uri = key_uri + self.current_versioned_key_identifier = None + self.last_key_rotation_timestamp = None + + +class EncryptionScopeListResult(msrest.serialization.Model): + """List of encryption scopes requested, and if paging is required, a URL to the next page of encryption scopes. 
+ + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: List of encryption scopes requested. + :vartype value: list[~azure.mgmt.storage.v2021_06_01.models.EncryptionScope] + :ivar next_link: Request URL that can be used to query next page of encryption scopes. Returned + when total number of requested encryption scopes exceeds the maximum page size. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[EncryptionScope]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(EncryptionScopeListResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class EncryptionService(msrest.serialization.Model): + """A service that allows server-side encryption to be used. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param enabled: A boolean indicating whether or not the service encrypts the data as it is + stored. + :type enabled: bool + :ivar last_enabled_time: Gets a rough estimate of the date/time when the encryption was last + enabled by the user. Only returned when encryption is enabled. There might be some unencrypted + blobs which were written after this time, as it is just a rough estimate. + :vartype last_enabled_time: ~datetime.datetime + :param key_type: Encryption key type to be used for the encryption service. 'Account' key type + implies that an account-scoped encryption key will be used. 'Service' key type implies that a + default service key is used. Possible values include: "Service", "Account". + :type key_type: str or ~azure.mgmt.storage.v2021_06_01.models.KeyType + """ + + _validation = { + 'last_enabled_time': {'readonly': True}, + } + + _attribute_map = { + 'enabled': {'key': 'enabled', 'type': 'bool'}, + 'last_enabled_time': {'key': 'lastEnabledTime', 'type': 'iso-8601'}, + 'key_type': {'key': 'keyType', 'type': 'str'}, + } + + def __init__( + self, + *, + enabled: Optional[bool] = None, + key_type: Optional[Union[str, "KeyType"]] = None, + **kwargs + ): + super(EncryptionService, self).__init__(**kwargs) + self.enabled = enabled + self.last_enabled_time = None + self.key_type = key_type + + +class EncryptionServices(msrest.serialization.Model): + """A list of services that support encryption. + + :param blob: The encryption function of the blob storage service. + :type blob: ~azure.mgmt.storage.v2021_06_01.models.EncryptionService + :param file: The encryption function of the file storage service. + :type file: ~azure.mgmt.storage.v2021_06_01.models.EncryptionService + :param table: The encryption function of the table storage service. + :type table: ~azure.mgmt.storage.v2021_06_01.models.EncryptionService + :param queue: The encryption function of the queue storage service. 
+ :type queue: ~azure.mgmt.storage.v2021_06_01.models.EncryptionService + """ + + _attribute_map = { + 'blob': {'key': 'blob', 'type': 'EncryptionService'}, + 'file': {'key': 'file', 'type': 'EncryptionService'}, + 'table': {'key': 'table', 'type': 'EncryptionService'}, + 'queue': {'key': 'queue', 'type': 'EncryptionService'}, + } + + def __init__( + self, + *, + blob: Optional["EncryptionService"] = None, + file: Optional["EncryptionService"] = None, + table: Optional["EncryptionService"] = None, + queue: Optional["EncryptionService"] = None, + **kwargs + ): + super(EncryptionServices, self).__init__(**kwargs) + self.blob = blob + self.file = file + self.table = table + self.queue = queue + + +class Endpoints(msrest.serialization.Model): + """The URIs that are used to perform a retrieval of a public blob, queue, table, web or dfs object. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar blob: Gets the blob endpoint. + :vartype blob: str + :ivar queue: Gets the queue endpoint. + :vartype queue: str + :ivar table: Gets the table endpoint. + :vartype table: str + :ivar file: Gets the file endpoint. + :vartype file: str + :ivar web: Gets the web endpoint. + :vartype web: str + :ivar dfs: Gets the dfs endpoint. + :vartype dfs: str + :param microsoft_endpoints: Gets the microsoft routing storage endpoints. + :type microsoft_endpoints: + ~azure.mgmt.storage.v2021_06_01.models.StorageAccountMicrosoftEndpoints + :param internet_endpoints: Gets the internet routing storage endpoints. + :type internet_endpoints: + ~azure.mgmt.storage.v2021_06_01.models.StorageAccountInternetEndpoints + """ + + _validation = { + 'blob': {'readonly': True}, + 'queue': {'readonly': True}, + 'table': {'readonly': True}, + 'file': {'readonly': True}, + 'web': {'readonly': True}, + 'dfs': {'readonly': True}, + } + + _attribute_map = { + 'blob': {'key': 'blob', 'type': 'str'}, + 'queue': {'key': 'queue', 'type': 'str'}, + 'table': {'key': 'table', 'type': 'str'}, + 'file': {'key': 'file', 'type': 'str'}, + 'web': {'key': 'web', 'type': 'str'}, + 'dfs': {'key': 'dfs', 'type': 'str'}, + 'microsoft_endpoints': {'key': 'microsoftEndpoints', 'type': 'StorageAccountMicrosoftEndpoints'}, + 'internet_endpoints': {'key': 'internetEndpoints', 'type': 'StorageAccountInternetEndpoints'}, + } + + def __init__( + self, + *, + microsoft_endpoints: Optional["StorageAccountMicrosoftEndpoints"] = None, + internet_endpoints: Optional["StorageAccountInternetEndpoints"] = None, + **kwargs + ): + super(Endpoints, self).__init__(**kwargs) + self.blob = None + self.queue = None + self.table = None + self.file = None + self.web = None + self.dfs = None + self.microsoft_endpoints = microsoft_endpoints + self.internet_endpoints = internet_endpoints + + +class ErrorResponse(msrest.serialization.Model): + """An error response from the storage resource provider. + + :param error: Azure Storage Resource Provider error response body. + :type error: ~azure.mgmt.storage.v2021_06_01.models.ErrorResponseBody + """ + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorResponseBody'}, + } + + def __init__( + self, + *, + error: Optional["ErrorResponseBody"] = None, + **kwargs + ): + super(ErrorResponse, self).__init__(**kwargs) + self.error = error + + +class ErrorResponseBody(msrest.serialization.Model): + """Error response body contract. + + :param code: An identifier for the error. Codes are invariant and are intended to be consumed + programmatically. 
+ :type code: str + :param message: A message describing the error, intended to be suitable for display in a user + interface. + :type message: str + """ + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__( + self, + *, + code: Optional[str] = None, + message: Optional[str] = None, + **kwargs + ): + super(ErrorResponseBody, self).__init__(**kwargs) + self.code = code + self.message = message + + +class ExtendedLocation(msrest.serialization.Model): + """The complex type of the extended location. + + :param name: The name of the extended location. + :type name: str + :param type: The type of the extended location. Possible values include: "EdgeZone". + :type type: str or ~azure.mgmt.storage.v2021_06_01.models.ExtendedLocationTypes + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + type: Optional[Union[str, "ExtendedLocationTypes"]] = None, + **kwargs + ): + super(ExtendedLocation, self).__init__(**kwargs) + self.name = name + self.type = type + + +class FileServiceItems(msrest.serialization.Model): + """FileServiceItems. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: List of file services returned. + :vartype value: list[~azure.mgmt.storage.v2021_06_01.models.FileServiceProperties] + """ + + _validation = { + 'value': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[FileServiceProperties]'}, + } + + def __init__( + self, + **kwargs + ): + super(FileServiceItems, self).__init__(**kwargs) + self.value = None + + +class FileServiceProperties(Resource): + """The properties of File services in storage account. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar sku: Sku name and tier. + :vartype sku: ~azure.mgmt.storage.v2021_06_01.models.Sku + :param cors: Specifies CORS rules for the File service. You can include up to five CorsRule + elements in the request. If no CorsRule elements are included in the request body, all CORS + rules will be deleted, and CORS will be disabled for the File service. + :type cors: ~azure.mgmt.storage.v2021_06_01.models.CorsRules + :param share_delete_retention_policy: The file service properties for share soft delete. + :type share_delete_retention_policy: + ~azure.mgmt.storage.v2021_06_01.models.DeleteRetentionPolicy + :param protocol_settings: Protocol settings for file service. 
+ :type protocol_settings: ~azure.mgmt.storage.v2021_06_01.models.ProtocolSettings + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'sku': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'cors': {'key': 'properties.cors', 'type': 'CorsRules'}, + 'share_delete_retention_policy': {'key': 'properties.shareDeleteRetentionPolicy', 'type': 'DeleteRetentionPolicy'}, + 'protocol_settings': {'key': 'properties.protocolSettings', 'type': 'ProtocolSettings'}, + } + + def __init__( + self, + *, + cors: Optional["CorsRules"] = None, + share_delete_retention_policy: Optional["DeleteRetentionPolicy"] = None, + protocol_settings: Optional["ProtocolSettings"] = None, + **kwargs + ): + super(FileServiceProperties, self).__init__(**kwargs) + self.sku = None + self.cors = cors + self.share_delete_retention_policy = share_delete_retention_policy + self.protocol_settings = protocol_settings + + +class FileShare(AzureEntityResource): + """Properties of the file share, including Id, resource name, resource type, Etag. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :ivar last_modified_time: Returns the date and time the share was last modified. + :vartype last_modified_time: ~datetime.datetime + :param metadata: A name-value pair to associate with the share as metadata. + :type metadata: dict[str, str] + :param share_quota: The maximum size of the share, in gigabytes. Must be greater than 0, and + less than or equal to 5TB (5120). For Large File Shares, the maximum size is 102400. + :type share_quota: int + :param enabled_protocols: The authentication protocol that is used for the file share. Can only + be specified when creating a share. Possible values include: "SMB", "NFS". + :type enabled_protocols: str or ~azure.mgmt.storage.v2021_06_01.models.EnabledProtocols + :param root_squash: The property is for NFS share only. The default is NoRootSquash. Possible + values include: "NoRootSquash", "RootSquash", "AllSquash". + :type root_squash: str or ~azure.mgmt.storage.v2021_06_01.models.RootSquashType + :ivar version: The version of the share. + :vartype version: str + :ivar deleted: Indicates whether the share was deleted. + :vartype deleted: bool + :ivar deleted_time: The deleted time if the share was deleted. + :vartype deleted_time: ~datetime.datetime + :ivar remaining_retention_days: Remaining retention days for share that was soft deleted. + :vartype remaining_retention_days: int + :param access_tier: Access tier for specific share. GpV2 account can choose between + TransactionOptimized (default), Hot, and Cool. FileStorage account can choose Premium. Possible + values include: "TransactionOptimized", "Hot", "Cool", "Premium". 
+ :type access_tier: str or ~azure.mgmt.storage.v2021_06_01.models.ShareAccessTier + :ivar access_tier_change_time: Indicates the last modification time for share access tier. + :vartype access_tier_change_time: ~datetime.datetime + :ivar access_tier_status: Indicates if there is a pending transition for access tier. + :vartype access_tier_status: str + :ivar share_usage_bytes: The approximate size of the data stored on the share. Note that this + value may not include all recently created or recently resized files. + :vartype share_usage_bytes: long + :ivar lease_status: The lease status of the share. Possible values include: "Locked", + "Unlocked". + :vartype lease_status: str or ~azure.mgmt.storage.v2021_06_01.models.LeaseStatus + :ivar lease_state: Lease state of the share. Possible values include: "Available", "Leased", + "Expired", "Breaking", "Broken". + :vartype lease_state: str or ~azure.mgmt.storage.v2021_06_01.models.LeaseState + :ivar lease_duration: Specifies whether the lease on a share is of infinite or fixed duration, + only when the share is leased. Possible values include: "Infinite", "Fixed". + :vartype lease_duration: str or ~azure.mgmt.storage.v2021_06_01.models.LeaseDuration + :param signed_identifiers: List of stored access policies specified on the share. + :type signed_identifiers: list[~azure.mgmt.storage.v2021_06_01.models.SignedIdentifier] + :ivar snapshot_time: Creation time of share snapshot returned in the response of list shares + with expand param "snapshots". + :vartype snapshot_time: ~datetime.datetime + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'last_modified_time': {'readonly': True}, + 'share_quota': {'maximum': 102400, 'minimum': 1}, + 'version': {'readonly': True}, + 'deleted': {'readonly': True}, + 'deleted_time': {'readonly': True}, + 'remaining_retention_days': {'readonly': True}, + 'access_tier_change_time': {'readonly': True}, + 'access_tier_status': {'readonly': True}, + 'share_usage_bytes': {'readonly': True}, + 'lease_status': {'readonly': True}, + 'lease_state': {'readonly': True}, + 'lease_duration': {'readonly': True}, + 'snapshot_time': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'iso-8601'}, + 'metadata': {'key': 'properties.metadata', 'type': '{str}'}, + 'share_quota': {'key': 'properties.shareQuota', 'type': 'int'}, + 'enabled_protocols': {'key': 'properties.enabledProtocols', 'type': 'str'}, + 'root_squash': {'key': 'properties.rootSquash', 'type': 'str'}, + 'version': {'key': 'properties.version', 'type': 'str'}, + 'deleted': {'key': 'properties.deleted', 'type': 'bool'}, + 'deleted_time': {'key': 'properties.deletedTime', 'type': 'iso-8601'}, + 'remaining_retention_days': {'key': 'properties.remainingRetentionDays', 'type': 'int'}, + 'access_tier': {'key': 'properties.accessTier', 'type': 'str'}, + 'access_tier_change_time': {'key': 'properties.accessTierChangeTime', 'type': 'iso-8601'}, + 'access_tier_status': {'key': 'properties.accessTierStatus', 'type': 'str'}, + 'share_usage_bytes': {'key': 'properties.shareUsageBytes', 'type': 'long'}, + 'lease_status': {'key': 'properties.leaseStatus', 'type': 'str'}, + 'lease_state': {'key': 'properties.leaseState', 'type': 'str'}, + 'lease_duration': 
{'key': 'properties.leaseDuration', 'type': 'str'}, + 'signed_identifiers': {'key': 'properties.signedIdentifiers', 'type': '[SignedIdentifier]'}, + 'snapshot_time': {'key': 'properties.snapshotTime', 'type': 'iso-8601'}, + } + + def __init__( + self, + *, + metadata: Optional[Dict[str, str]] = None, + share_quota: Optional[int] = None, + enabled_protocols: Optional[Union[str, "EnabledProtocols"]] = None, + root_squash: Optional[Union[str, "RootSquashType"]] = None, + access_tier: Optional[Union[str, "ShareAccessTier"]] = None, + signed_identifiers: Optional[List["SignedIdentifier"]] = None, + **kwargs + ): + super(FileShare, self).__init__(**kwargs) + self.last_modified_time = None + self.metadata = metadata + self.share_quota = share_quota + self.enabled_protocols = enabled_protocols + self.root_squash = root_squash + self.version = None + self.deleted = None + self.deleted_time = None + self.remaining_retention_days = None + self.access_tier = access_tier + self.access_tier_change_time = None + self.access_tier_status = None + self.share_usage_bytes = None + self.lease_status = None + self.lease_state = None + self.lease_duration = None + self.signed_identifiers = signed_identifiers + self.snapshot_time = None + + +class FileShareItem(AzureEntityResource): + """The file share properties be listed out. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :ivar last_modified_time: Returns the date and time the share was last modified. + :vartype last_modified_time: ~datetime.datetime + :param metadata: A name-value pair to associate with the share as metadata. + :type metadata: dict[str, str] + :param share_quota: The maximum size of the share, in gigabytes. Must be greater than 0, and + less than or equal to 5TB (5120). For Large File Shares, the maximum size is 102400. + :type share_quota: int + :param enabled_protocols: The authentication protocol that is used for the file share. Can only + be specified when creating a share. Possible values include: "SMB", "NFS". + :type enabled_protocols: str or ~azure.mgmt.storage.v2021_06_01.models.EnabledProtocols + :param root_squash: The property is for NFS share only. The default is NoRootSquash. Possible + values include: "NoRootSquash", "RootSquash", "AllSquash". + :type root_squash: str or ~azure.mgmt.storage.v2021_06_01.models.RootSquashType + :ivar version: The version of the share. + :vartype version: str + :ivar deleted: Indicates whether the share was deleted. + :vartype deleted: bool + :ivar deleted_time: The deleted time if the share was deleted. + :vartype deleted_time: ~datetime.datetime + :ivar remaining_retention_days: Remaining retention days for share that was soft deleted. + :vartype remaining_retention_days: int + :param access_tier: Access tier for specific share. GpV2 account can choose between + TransactionOptimized (default), Hot, and Cool. FileStorage account can choose Premium. Possible + values include: "TransactionOptimized", "Hot", "Cool", "Premium". 
+ :type access_tier: str or ~azure.mgmt.storage.v2021_06_01.models.ShareAccessTier + :ivar access_tier_change_time: Indicates the last modification time for share access tier. + :vartype access_tier_change_time: ~datetime.datetime + :ivar access_tier_status: Indicates if there is a pending transition for access tier. + :vartype access_tier_status: str + :ivar share_usage_bytes: The approximate size of the data stored on the share. Note that this + value may not include all recently created or recently resized files. + :vartype share_usage_bytes: long + :ivar lease_status: The lease status of the share. Possible values include: "Locked", + "Unlocked". + :vartype lease_status: str or ~azure.mgmt.storage.v2021_06_01.models.LeaseStatus + :ivar lease_state: Lease state of the share. Possible values include: "Available", "Leased", + "Expired", "Breaking", "Broken". + :vartype lease_state: str or ~azure.mgmt.storage.v2021_06_01.models.LeaseState + :ivar lease_duration: Specifies whether the lease on a share is of infinite or fixed duration, + only when the share is leased. Possible values include: "Infinite", "Fixed". + :vartype lease_duration: str or ~azure.mgmt.storage.v2021_06_01.models.LeaseDuration + :param signed_identifiers: List of stored access policies specified on the share. + :type signed_identifiers: list[~azure.mgmt.storage.v2021_06_01.models.SignedIdentifier] + :ivar snapshot_time: Creation time of share snapshot returned in the response of list shares + with expand param "snapshots". + :vartype snapshot_time: ~datetime.datetime + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'last_modified_time': {'readonly': True}, + 'share_quota': {'maximum': 102400, 'minimum': 1}, + 'version': {'readonly': True}, + 'deleted': {'readonly': True}, + 'deleted_time': {'readonly': True}, + 'remaining_retention_days': {'readonly': True}, + 'access_tier_change_time': {'readonly': True}, + 'access_tier_status': {'readonly': True}, + 'share_usage_bytes': {'readonly': True}, + 'lease_status': {'readonly': True}, + 'lease_state': {'readonly': True}, + 'lease_duration': {'readonly': True}, + 'snapshot_time': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'iso-8601'}, + 'metadata': {'key': 'properties.metadata', 'type': '{str}'}, + 'share_quota': {'key': 'properties.shareQuota', 'type': 'int'}, + 'enabled_protocols': {'key': 'properties.enabledProtocols', 'type': 'str'}, + 'root_squash': {'key': 'properties.rootSquash', 'type': 'str'}, + 'version': {'key': 'properties.version', 'type': 'str'}, + 'deleted': {'key': 'properties.deleted', 'type': 'bool'}, + 'deleted_time': {'key': 'properties.deletedTime', 'type': 'iso-8601'}, + 'remaining_retention_days': {'key': 'properties.remainingRetentionDays', 'type': 'int'}, + 'access_tier': {'key': 'properties.accessTier', 'type': 'str'}, + 'access_tier_change_time': {'key': 'properties.accessTierChangeTime', 'type': 'iso-8601'}, + 'access_tier_status': {'key': 'properties.accessTierStatus', 'type': 'str'}, + 'share_usage_bytes': {'key': 'properties.shareUsageBytes', 'type': 'long'}, + 'lease_status': {'key': 'properties.leaseStatus', 'type': 'str'}, + 'lease_state': {'key': 'properties.leaseState', 'type': 'str'}, + 'lease_duration': 
{'key': 'properties.leaseDuration', 'type': 'str'}, + 'signed_identifiers': {'key': 'properties.signedIdentifiers', 'type': '[SignedIdentifier]'}, + 'snapshot_time': {'key': 'properties.snapshotTime', 'type': 'iso-8601'}, + } + + def __init__( + self, + *, + metadata: Optional[Dict[str, str]] = None, + share_quota: Optional[int] = None, + enabled_protocols: Optional[Union[str, "EnabledProtocols"]] = None, + root_squash: Optional[Union[str, "RootSquashType"]] = None, + access_tier: Optional[Union[str, "ShareAccessTier"]] = None, + signed_identifiers: Optional[List["SignedIdentifier"]] = None, + **kwargs + ): + super(FileShareItem, self).__init__(**kwargs) + self.last_modified_time = None + self.metadata = metadata + self.share_quota = share_quota + self.enabled_protocols = enabled_protocols + self.root_squash = root_squash + self.version = None + self.deleted = None + self.deleted_time = None + self.remaining_retention_days = None + self.access_tier = access_tier + self.access_tier_change_time = None + self.access_tier_status = None + self.share_usage_bytes = None + self.lease_status = None + self.lease_state = None + self.lease_duration = None + self.signed_identifiers = signed_identifiers + self.snapshot_time = None + + +class FileShareItems(msrest.serialization.Model): + """Response schema. Contains list of shares returned, and if paging is requested or required, a URL to next page of shares. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: List of file shares returned. + :vartype value: list[~azure.mgmt.storage.v2021_06_01.models.FileShareItem] + :ivar next_link: Request URL that can be used to query next page of shares. Returned when total + number of requested shares exceed maximum page size. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[FileShareItem]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(FileShareItems, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class GeoReplicationStats(msrest.serialization.Model): + """Statistics related to replication for storage account's Blob, Table, Queue and File services. It is only available when geo-redundant replication is enabled for the storage account. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar status: The status of the secondary location. Possible values are: - Live: Indicates that + the secondary location is active and operational. - Bootstrap: Indicates initial + synchronization from the primary location to the secondary location is in progress.This + typically occurs when replication is first enabled. - Unavailable: Indicates that the secondary + location is temporarily unavailable. Possible values include: "Live", "Bootstrap", + "Unavailable". + :vartype status: str or ~azure.mgmt.storage.v2021_06_01.models.GeoReplicationStatus + :ivar last_sync_time: All primary writes preceding this UTC date/time value are guaranteed to + be available for read operations. Primary writes following this point in time may or may not be + available for reads. Element may be default value if value of LastSyncTime is not available, + this can happen if secondary is offline or we are in bootstrap. 
+ :vartype last_sync_time: ~datetime.datetime + :ivar can_failover: A boolean flag which indicates whether or not account failover is supported + for the account. + :vartype can_failover: bool + """ + + _validation = { + 'status': {'readonly': True}, + 'last_sync_time': {'readonly': True}, + 'can_failover': {'readonly': True}, + } + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'last_sync_time': {'key': 'lastSyncTime', 'type': 'iso-8601'}, + 'can_failover': {'key': 'canFailover', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(GeoReplicationStats, self).__init__(**kwargs) + self.status = None + self.last_sync_time = None + self.can_failover = None + + +class Identity(msrest.serialization.Model): + """Identity for the resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar principal_id: The principal ID of resource identity. + :vartype principal_id: str + :ivar tenant_id: The tenant ID of resource. + :vartype tenant_id: str + :param type: Required. The identity type. Possible values include: "None", "SystemAssigned", + "UserAssigned", "SystemAssigned,UserAssigned". + :type type: str or ~azure.mgmt.storage.v2021_06_01.models.IdentityType + :param user_assigned_identities: Gets or sets a list of key value pairs that describe the set + of User Assigned identities that will be used with this storage account. The key is the ARM + resource identifier of the identity. Only 1 User Assigned identity is permitted here. + :type user_assigned_identities: dict[str, + ~azure.mgmt.storage.v2021_06_01.models.UserAssignedIdentity] + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentity}'}, + } + + def __init__( + self, + *, + type: Union[str, "IdentityType"], + user_assigned_identities: Optional[Dict[str, "UserAssignedIdentity"]] = None, + **kwargs + ): + super(Identity, self).__init__(**kwargs) + self.principal_id = None + self.tenant_id = None + self.type = type + self.user_assigned_identities = user_assigned_identities + + +class ImmutabilityPolicy(AzureEntityResource): + """The ImmutabilityPolicy property of a blob container, including Id, resource name, resource type, Etag. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :param immutability_period_since_creation_in_days: The immutability period for the blobs in the + container since the policy creation, in days. + :type immutability_period_since_creation_in_days: int + :ivar state: The ImmutabilityPolicy state of a blob container, possible values include: Locked + and Unlocked. Possible values include: "Locked", "Unlocked". 
+ :vartype state: str or ~azure.mgmt.storage.v2021_06_01.models.ImmutabilityPolicyState + :param allow_protected_append_writes: This property can only be changed for unlocked time-based + retention policies. When enabled, new blocks can be written to an append blob while maintaining + immutability protection and compliance. Only new blocks can be added and any existing blocks + cannot be modified or deleted. This property cannot be changed with ExtendImmutabilityPolicy + API. + :type allow_protected_append_writes: bool + :param allow_protected_append_writes_all: This property can only be changed for unlocked + time-based retention policies. When enabled, new blocks can be written to both 'Append and Bock + Blobs' while maintaining immutability protection and compliance. Only new blocks can be added + and any existing blocks cannot be modified or deleted. This property cannot be changed with + ExtendImmutabilityPolicy API. The 'allowProtectedAppendWrites' and + 'allowProtectedAppendWritesAll' properties are mutually exclusive. + :type allow_protected_append_writes_all: bool + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'state': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'immutability_period_since_creation_in_days': {'key': 'properties.immutabilityPeriodSinceCreationInDays', 'type': 'int'}, + 'state': {'key': 'properties.state', 'type': 'str'}, + 'allow_protected_append_writes': {'key': 'properties.allowProtectedAppendWrites', 'type': 'bool'}, + 'allow_protected_append_writes_all': {'key': 'properties.allowProtectedAppendWritesAll', 'type': 'bool'}, + } + + def __init__( + self, + *, + immutability_period_since_creation_in_days: Optional[int] = None, + allow_protected_append_writes: Optional[bool] = None, + allow_protected_append_writes_all: Optional[bool] = None, + **kwargs + ): + super(ImmutabilityPolicy, self).__init__(**kwargs) + self.immutability_period_since_creation_in_days = immutability_period_since_creation_in_days + self.state = None + self.allow_protected_append_writes = allow_protected_append_writes + self.allow_protected_append_writes_all = allow_protected_append_writes_all + + +class ImmutabilityPolicyProperties(msrest.serialization.Model): + """The properties of an ImmutabilityPolicy of a blob container. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar etag: ImmutabilityPolicy Etag. + :vartype etag: str + :ivar update_history: The ImmutabilityPolicy update history of the blob container. + :vartype update_history: list[~azure.mgmt.storage.v2021_06_01.models.UpdateHistoryProperty] + :param immutability_period_since_creation_in_days: The immutability period for the blobs in the + container since the policy creation, in days. + :type immutability_period_since_creation_in_days: int + :ivar state: The ImmutabilityPolicy state of a blob container, possible values include: Locked + and Unlocked. Possible values include: "Locked", "Unlocked". + :vartype state: str or ~azure.mgmt.storage.v2021_06_01.models.ImmutabilityPolicyState + :param allow_protected_append_writes: This property can only be changed for unlocked time-based + retention policies. When enabled, new blocks can be written to an append blob while maintaining + immutability protection and compliance. 
Only new blocks can be added and any existing blocks + cannot be modified or deleted. This property cannot be changed with ExtendImmutabilityPolicy + API. + :type allow_protected_append_writes: bool + :param allow_protected_append_writes_all: This property can only be changed for unlocked + time-based retention policies. When enabled, new blocks can be written to both 'Append and Bock + Blobs' while maintaining immutability protection and compliance. Only new blocks can be added + and any existing blocks cannot be modified or deleted. This property cannot be changed with + ExtendImmutabilityPolicy API. The 'allowProtectedAppendWrites' and + 'allowProtectedAppendWritesAll' properties are mutually exclusive. + :type allow_protected_append_writes_all: bool + """ + + _validation = { + 'etag': {'readonly': True}, + 'update_history': {'readonly': True}, + 'state': {'readonly': True}, + } + + _attribute_map = { + 'etag': {'key': 'etag', 'type': 'str'}, + 'update_history': {'key': 'updateHistory', 'type': '[UpdateHistoryProperty]'}, + 'immutability_period_since_creation_in_days': {'key': 'properties.immutabilityPeriodSinceCreationInDays', 'type': 'int'}, + 'state': {'key': 'properties.state', 'type': 'str'}, + 'allow_protected_append_writes': {'key': 'properties.allowProtectedAppendWrites', 'type': 'bool'}, + 'allow_protected_append_writes_all': {'key': 'properties.allowProtectedAppendWritesAll', 'type': 'bool'}, + } + + def __init__( + self, + *, + immutability_period_since_creation_in_days: Optional[int] = None, + allow_protected_append_writes: Optional[bool] = None, + allow_protected_append_writes_all: Optional[bool] = None, + **kwargs + ): + super(ImmutabilityPolicyProperties, self).__init__(**kwargs) + self.etag = None + self.update_history = None + self.immutability_period_since_creation_in_days = immutability_period_since_creation_in_days + self.state = None + self.allow_protected_append_writes = allow_protected_append_writes + self.allow_protected_append_writes_all = allow_protected_append_writes_all + + +class ImmutableStorageAccount(msrest.serialization.Model): + """This property enables and defines account-level immutability. Enabling the feature auto-enables Blob Versioning. + + :param enabled: A boolean flag which enables account-level immutability. All the containers + under such an account have object-level immutability enabled by default. + :type enabled: bool + :param immutability_policy: Specifies the default account-level immutability policy which is + inherited and applied to objects that do not possess an explicit immutability policy at the + object level. The object-level immutability policy has higher precedence than the + container-level immutability policy, which has a higher precedence than the account-level + immutability policy. + :type immutability_policy: + ~azure.mgmt.storage.v2021_06_01.models.AccountImmutabilityPolicyProperties + """ + + _attribute_map = { + 'enabled': {'key': 'enabled', 'type': 'bool'}, + 'immutability_policy': {'key': 'immutabilityPolicy', 'type': 'AccountImmutabilityPolicyProperties'}, + } + + def __init__( + self, + *, + enabled: Optional[bool] = None, + immutability_policy: Optional["AccountImmutabilityPolicyProperties"] = None, + **kwargs + ): + super(ImmutableStorageAccount, self).__init__(**kwargs) + self.enabled = enabled + self.immutability_policy = immutability_policy + + +class ImmutableStorageWithVersioning(msrest.serialization.Model): + """Object level immutability properties of the container. 
+ + Variables are only populated by the server, and will be ignored when sending a request. + + :param enabled: This is an immutable property, when set to true it enables object level + immutability at the container level. + :type enabled: bool + :ivar time_stamp: Returns the date and time the object level immutability was enabled. + :vartype time_stamp: ~datetime.datetime + :ivar migration_state: This property denotes the container level immutability to object level + immutability migration state. Possible values include: "InProgress", "Completed". + :vartype migration_state: str or ~azure.mgmt.storage.v2021_06_01.models.MigrationState + """ + + _validation = { + 'time_stamp': {'readonly': True}, + 'migration_state': {'readonly': True}, + } + + _attribute_map = { + 'enabled': {'key': 'enabled', 'type': 'bool'}, + 'time_stamp': {'key': 'timeStamp', 'type': 'iso-8601'}, + 'migration_state': {'key': 'migrationState', 'type': 'str'}, + } + + def __init__( + self, + *, + enabled: Optional[bool] = None, + **kwargs + ): + super(ImmutableStorageWithVersioning, self).__init__(**kwargs) + self.enabled = enabled + self.time_stamp = None + self.migration_state = None + + +class IPRule(msrest.serialization.Model): + """IP rule with specific IP or IP range in CIDR format. + + All required parameters must be populated in order to send to Azure. + + :param ip_address_or_range: Required. Specifies the IP or IP range in CIDR format. Only IPV4 + address is allowed. + :type ip_address_or_range: str + :param action: The action of IP ACL rule. The only acceptable values to pass in are None and + "Allow". The default value is None. + :type action: str + """ + + _validation = { + 'ip_address_or_range': {'required': True}, + } + + _attribute_map = { + 'ip_address_or_range': {'key': 'value', 'type': 'str'}, + 'action': {'key': 'action', 'type': 'str'}, + } + + def __init__( + self, + *, + ip_address_or_range: str, + action: Optional[str] = None, + **kwargs + ): + super(IPRule, self).__init__(**kwargs) + self.ip_address_or_range = ip_address_or_range + self.action = action + + +class KeyCreationTime(msrest.serialization.Model): + """Storage account keys creation time. + + :param key1: + :type key1: ~datetime.datetime + :param key2: + :type key2: ~datetime.datetime + """ + + _attribute_map = { + 'key1': {'key': 'key1', 'type': 'iso-8601'}, + 'key2': {'key': 'key2', 'type': 'iso-8601'}, + } + + def __init__( + self, + *, + key1: Optional[datetime.datetime] = None, + key2: Optional[datetime.datetime] = None, + **kwargs + ): + super(KeyCreationTime, self).__init__(**kwargs) + self.key1 = key1 + self.key2 = key2 + + +class KeyPolicy(msrest.serialization.Model): + """KeyPolicy assigned to the storage account. + + All required parameters must be populated in order to send to Azure. + + :param key_expiration_period_in_days: Required. The key expiration period in days. + :type key_expiration_period_in_days: int + """ + + _validation = { + 'key_expiration_period_in_days': {'required': True}, + } + + _attribute_map = { + 'key_expiration_period_in_days': {'key': 'keyExpirationPeriodInDays', 'type': 'int'}, + } + + def __init__( + self, + *, + key_expiration_period_in_days: int, + **kwargs + ): + super(KeyPolicy, self).__init__(**kwargs) + self.key_expiration_period_in_days = key_expiration_period_in_days + + +class KeyVaultProperties(msrest.serialization.Model): + """Properties of key vault. + + Variables are only populated by the server, and will be ignored when sending a request. 
+ + :param key_name: The name of KeyVault key. + :type key_name: str + :param key_version: The version of KeyVault key. + :type key_version: str + :param key_vault_uri: The Uri of KeyVault. + :type key_vault_uri: str + :ivar current_versioned_key_identifier: The object identifier of the current versioned Key + Vault Key in use. + :vartype current_versioned_key_identifier: str + :ivar last_key_rotation_timestamp: Timestamp of last rotation of the Key Vault Key. + :vartype last_key_rotation_timestamp: ~datetime.datetime + """ + + _validation = { + 'current_versioned_key_identifier': {'readonly': True}, + 'last_key_rotation_timestamp': {'readonly': True}, + } + + _attribute_map = { + 'key_name': {'key': 'keyname', 'type': 'str'}, + 'key_version': {'key': 'keyversion', 'type': 'str'}, + 'key_vault_uri': {'key': 'keyvaulturi', 'type': 'str'}, + 'current_versioned_key_identifier': {'key': 'currentVersionedKeyIdentifier', 'type': 'str'}, + 'last_key_rotation_timestamp': {'key': 'lastKeyRotationTimestamp', 'type': 'iso-8601'}, + } + + def __init__( + self, + *, + key_name: Optional[str] = None, + key_version: Optional[str] = None, + key_vault_uri: Optional[str] = None, + **kwargs + ): + super(KeyVaultProperties, self).__init__(**kwargs) + self.key_name = key_name + self.key_version = key_version + self.key_vault_uri = key_vault_uri + self.current_versioned_key_identifier = None + self.last_key_rotation_timestamp = None + + +class LastAccessTimeTrackingPolicy(msrest.serialization.Model): + """The blob service properties for Last access time based tracking policy. + + All required parameters must be populated in order to send to Azure. + + :param enable: Required. When set to true last access time based tracking is enabled. + :type enable: bool + :param name: Name of the policy. The valid value is AccessTimeTracking. This field is currently + read only. Possible values include: "AccessTimeTracking". + :type name: str or ~azure.mgmt.storage.v2021_06_01.models.Name + :param tracking_granularity_in_days: The field specifies blob object tracking granularity in + days, typically how often the blob object should be tracked.This field is currently read only + with value as 1. + :type tracking_granularity_in_days: int + :param blob_type: An array of predefined supported blob types. Only blockBlob is the supported + value. This field is currently read only. + :type blob_type: list[str] + """ + + _validation = { + 'enable': {'required': True}, + } + + _attribute_map = { + 'enable': {'key': 'enable', 'type': 'bool'}, + 'name': {'key': 'name', 'type': 'str'}, + 'tracking_granularity_in_days': {'key': 'trackingGranularityInDays', 'type': 'int'}, + 'blob_type': {'key': 'blobType', 'type': '[str]'}, + } + + def __init__( + self, + *, + enable: bool, + name: Optional[Union[str, "Name"]] = None, + tracking_granularity_in_days: Optional[int] = None, + blob_type: Optional[List[str]] = None, + **kwargs + ): + super(LastAccessTimeTrackingPolicy, self).__init__(**kwargs) + self.enable = enable + self.name = name + self.tracking_granularity_in_days = tracking_granularity_in_days + self.blob_type = blob_type + + +class LeaseContainerRequest(msrest.serialization.Model): + """Lease Container request schema. + + All required parameters must be populated in order to send to Azure. + + :param action: Required. Specifies the lease action. Can be one of the available actions. + Possible values include: "Acquire", "Renew", "Change", "Release", "Break". 
+ :type action: str or ~azure.mgmt.storage.v2021_06_01.models.LeaseContainerRequestAction + :param lease_id: Identifies the lease. Can be specified in any valid GUID string format. + :type lease_id: str + :param break_period: Optional. For a break action, proposed duration the lease should continue + before it is broken, in seconds, between 0 and 60. + :type break_period: int + :param lease_duration: Required for acquire. Specifies the duration of the lease, in seconds, + or negative one (-1) for a lease that never expires. + :type lease_duration: int + :param proposed_lease_id: Optional for acquire, required for change. Proposed lease ID, in a + GUID string format. + :type proposed_lease_id: str + """ + + _validation = { + 'action': {'required': True}, + } + + _attribute_map = { + 'action': {'key': 'action', 'type': 'str'}, + 'lease_id': {'key': 'leaseId', 'type': 'str'}, + 'break_period': {'key': 'breakPeriod', 'type': 'int'}, + 'lease_duration': {'key': 'leaseDuration', 'type': 'int'}, + 'proposed_lease_id': {'key': 'proposedLeaseId', 'type': 'str'}, + } + + def __init__( + self, + *, + action: Union[str, "LeaseContainerRequestAction"], + lease_id: Optional[str] = None, + break_period: Optional[int] = None, + lease_duration: Optional[int] = None, + proposed_lease_id: Optional[str] = None, + **kwargs + ): + super(LeaseContainerRequest, self).__init__(**kwargs) + self.action = action + self.lease_id = lease_id + self.break_period = break_period + self.lease_duration = lease_duration + self.proposed_lease_id = proposed_lease_id + + +class LeaseContainerResponse(msrest.serialization.Model): + """Lease Container response schema. + + :param lease_id: Returned unique lease ID that must be included with any request to delete the + container, or to renew, change, or release the lease. + :type lease_id: str + :param lease_time_seconds: Approximate time remaining in the lease period, in seconds. + :type lease_time_seconds: str + """ + + _attribute_map = { + 'lease_id': {'key': 'leaseId', 'type': 'str'}, + 'lease_time_seconds': {'key': 'leaseTimeSeconds', 'type': 'str'}, + } + + def __init__( + self, + *, + lease_id: Optional[str] = None, + lease_time_seconds: Optional[str] = None, + **kwargs + ): + super(LeaseContainerResponse, self).__init__(**kwargs) + self.lease_id = lease_id + self.lease_time_seconds = lease_time_seconds + + +class LeaseShareRequest(msrest.serialization.Model): + """Lease Share request schema. + + All required parameters must be populated in order to send to Azure. + + :param action: Required. Specifies the lease action. Can be one of the available actions. + Possible values include: "Acquire", "Renew", "Change", "Release", "Break". + :type action: str or ~azure.mgmt.storage.v2021_06_01.models.LeaseShareAction + :param lease_id: Identifies the lease. Can be specified in any valid GUID string format. + :type lease_id: str + :param break_period: Optional. For a break action, proposed duration the lease should continue + before it is broken, in seconds, between 0 and 60. + :type break_period: int + :param lease_duration: Required for acquire. Specifies the duration of the lease, in seconds, + or negative one (-1) for a lease that never expires. + :type lease_duration: int + :param proposed_lease_id: Optional for acquire, required for change. Proposed lease ID, in a + GUID string format. 
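A short, illustrative sketch (not part of the generated code) of the lease request model defined above: per its docstring, `lease_duration` is required for an acquire action, `-1` requests a lease that never expires, and `proposed_lease_id` must be a valid GUID string.

```python
import uuid

from azure.mgmt.storage.v2021_06_01.models import LeaseContainerRequest

acquire = LeaseContainerRequest(
    action="Acquire",
    lease_duration=-1,                    # never expires
    proposed_lease_id=str(uuid.uuid4()),  # optional for acquire, required for change
)

release = LeaseContainerRequest(action="Release", lease_id=acquire.proposed_lease_id)
```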
+ :type proposed_lease_id: str + """ + + _validation = { + 'action': {'required': True}, + } + + _attribute_map = { + 'action': {'key': 'action', 'type': 'str'}, + 'lease_id': {'key': 'leaseId', 'type': 'str'}, + 'break_period': {'key': 'breakPeriod', 'type': 'int'}, + 'lease_duration': {'key': 'leaseDuration', 'type': 'int'}, + 'proposed_lease_id': {'key': 'proposedLeaseId', 'type': 'str'}, + } + + def __init__( + self, + *, + action: Union[str, "LeaseShareAction"], + lease_id: Optional[str] = None, + break_period: Optional[int] = None, + lease_duration: Optional[int] = None, + proposed_lease_id: Optional[str] = None, + **kwargs + ): + super(LeaseShareRequest, self).__init__(**kwargs) + self.action = action + self.lease_id = lease_id + self.break_period = break_period + self.lease_duration = lease_duration + self.proposed_lease_id = proposed_lease_id + + +class LeaseShareResponse(msrest.serialization.Model): + """Lease Share response schema. + + :param lease_id: Returned unique lease ID that must be included with any request to delete the + share, or to renew, change, or release the lease. + :type lease_id: str + :param lease_time_seconds: Approximate time remaining in the lease period, in seconds. + :type lease_time_seconds: str + """ + + _attribute_map = { + 'lease_id': {'key': 'leaseId', 'type': 'str'}, + 'lease_time_seconds': {'key': 'leaseTimeSeconds', 'type': 'str'}, + } + + def __init__( + self, + *, + lease_id: Optional[str] = None, + lease_time_seconds: Optional[str] = None, + **kwargs + ): + super(LeaseShareResponse, self).__init__(**kwargs) + self.lease_id = lease_id + self.lease_time_seconds = lease_time_seconds + + +class LegalHold(msrest.serialization.Model): + """The LegalHold property of a blob container. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar has_legal_hold: The hasLegalHold public property is set to true by SRP if there are at + least one existing tag. The hasLegalHold public property is set to false by SRP if all existing + legal hold tags are cleared out. There can be a maximum of 1000 blob containers with + hasLegalHold=true for a given account. + :vartype has_legal_hold: bool + :param tags: Required. A set of tags. Each tag should be 3 to 23 alphanumeric characters and is + normalized to lower case at SRP. + :type tags: list[str] + :param allow_protected_append_writes_all: When enabled, new blocks can be written to both + 'Append and Bock Blobs' while maintaining legal hold protection and compliance. Only new blocks + can be added and any existing blocks cannot be modified or deleted. + :type allow_protected_append_writes_all: bool + """ + + _validation = { + 'has_legal_hold': {'readonly': True}, + 'tags': {'required': True}, + } + + _attribute_map = { + 'has_legal_hold': {'key': 'hasLegalHold', 'type': 'bool'}, + 'tags': {'key': 'tags', 'type': '[str]'}, + 'allow_protected_append_writes_all': {'key': 'allowProtectedAppendWritesAll', 'type': 'bool'}, + } + + def __init__( + self, + *, + tags: List[str], + allow_protected_append_writes_all: Optional[bool] = None, + **kwargs + ): + super(LegalHold, self).__init__(**kwargs) + self.has_legal_hold = None + self.tags = tags + self.allow_protected_append_writes_all = allow_protected_append_writes_all + + +class LegalHoldProperties(msrest.serialization.Model): + """The LegalHold property of a blob container. 
+ + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar has_legal_hold: The hasLegalHold public property is set to true by SRP if there are at + least one existing tag. The hasLegalHold public property is set to false by SRP if all existing + legal hold tags are cleared out. There can be a maximum of 1000 blob containers with + hasLegalHold=true for a given account. + :vartype has_legal_hold: bool + :param tags: A set of tags. The list of LegalHold tags of a blob container. + :type tags: list[~azure.mgmt.storage.v2021_06_01.models.TagProperty] + :param protected_append_writes_history: Protected append blob writes history. + :type protected_append_writes_history: + ~azure.mgmt.storage.v2021_06_01.models.ProtectedAppendWritesHistory + """ + + _validation = { + 'has_legal_hold': {'readonly': True}, + } + + _attribute_map = { + 'has_legal_hold': {'key': 'hasLegalHold', 'type': 'bool'}, + 'tags': {'key': 'tags', 'type': '[TagProperty]'}, + 'protected_append_writes_history': {'key': 'protectedAppendWritesHistory', 'type': 'ProtectedAppendWritesHistory'}, + } + + def __init__( + self, + *, + tags: Optional[List["TagProperty"]] = None, + protected_append_writes_history: Optional["ProtectedAppendWritesHistory"] = None, + **kwargs + ): + super(LegalHoldProperties, self).__init__(**kwargs) + self.has_legal_hold = None + self.tags = tags + self.protected_append_writes_history = protected_append_writes_history + + +class ListAccountSasResponse(msrest.serialization.Model): + """The List SAS credentials operation response. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar account_sas_token: List SAS credentials of storage account. + :vartype account_sas_token: str + """ + + _validation = { + 'account_sas_token': {'readonly': True}, + } + + _attribute_map = { + 'account_sas_token': {'key': 'accountSasToken', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ListAccountSasResponse, self).__init__(**kwargs) + self.account_sas_token = None + + +class ListBlobInventoryPolicy(msrest.serialization.Model): + """List of blob inventory policies returned. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: List of blob inventory policies. + :vartype value: list[~azure.mgmt.storage.v2021_06_01.models.BlobInventoryPolicy] + """ + + _validation = { + 'value': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[BlobInventoryPolicy]'}, + } + + def __init__( + self, + **kwargs + ): + super(ListBlobInventoryPolicy, self).__init__(**kwargs) + self.value = None + + +class ListContainerItem(AzureEntityResource): + """The blob container properties be listed out. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :ivar version: The version of the deleted blob container. + :vartype version: str + :ivar deleted: Indicates whether the blob container was deleted. 
+ :vartype deleted: bool + :ivar deleted_time: Blob container deletion time. + :vartype deleted_time: ~datetime.datetime + :ivar remaining_retention_days: Remaining retention days for soft deleted blob container. + :vartype remaining_retention_days: int + :param default_encryption_scope: Default the container to use specified encryption scope for + all writes. + :type default_encryption_scope: str + :param deny_encryption_scope_override: Block override of encryption scope from the container + default. + :type deny_encryption_scope_override: bool + :param public_access: Specifies whether data in the container may be accessed publicly and the + level of access. Possible values include: "Container", "Blob", "None". + :type public_access: str or ~azure.mgmt.storage.v2021_06_01.models.PublicAccess + :ivar last_modified_time: Returns the date and time the container was last modified. + :vartype last_modified_time: ~datetime.datetime + :ivar lease_status: The lease status of the container. Possible values include: "Locked", + "Unlocked". + :vartype lease_status: str or ~azure.mgmt.storage.v2021_06_01.models.LeaseStatus + :ivar lease_state: Lease state of the container. Possible values include: "Available", + "Leased", "Expired", "Breaking", "Broken". + :vartype lease_state: str or ~azure.mgmt.storage.v2021_06_01.models.LeaseState + :ivar lease_duration: Specifies whether the lease on a container is of infinite or fixed + duration, only when the container is leased. Possible values include: "Infinite", "Fixed". + :vartype lease_duration: str or ~azure.mgmt.storage.v2021_06_01.models.LeaseDuration + :param metadata: A name-value pair to associate with the container as metadata. + :type metadata: dict[str, str] + :ivar immutability_policy: The ImmutabilityPolicy property of the container. + :vartype immutability_policy: + ~azure.mgmt.storage.v2021_06_01.models.ImmutabilityPolicyProperties + :ivar legal_hold: The LegalHold property of the container. + :vartype legal_hold: ~azure.mgmt.storage.v2021_06_01.models.LegalHoldProperties + :ivar has_legal_hold: The hasLegalHold public property is set to true by SRP if there are at + least one existing tag. The hasLegalHold public property is set to false by SRP if all existing + legal hold tags are cleared out. There can be a maximum of 1000 blob containers with + hasLegalHold=true for a given account. + :vartype has_legal_hold: bool + :ivar has_immutability_policy: The hasImmutabilityPolicy public property is set to true by SRP + if ImmutabilityPolicy has been created for this container. The hasImmutabilityPolicy public + property is set to false by SRP if ImmutabilityPolicy has not been created for this container. + :vartype has_immutability_policy: bool + :param immutable_storage_with_versioning: The object level immutability property of the + container. The property is immutable and can only be set to true at the container creation + time. Existing containers must undergo a migration process. + :type immutable_storage_with_versioning: + ~azure.mgmt.storage.v2021_06_01.models.ImmutableStorageWithVersioning + :param enable_nfs_v3_root_squash: Enable NFSv3 root squash on blob container. + :type enable_nfs_v3_root_squash: bool + :param enable_nfs_v3_all_squash: Enable NFSv3 all squash on blob container. 
+ :type enable_nfs_v3_all_squash: bool + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'version': {'readonly': True}, + 'deleted': {'readonly': True}, + 'deleted_time': {'readonly': True}, + 'remaining_retention_days': {'readonly': True}, + 'last_modified_time': {'readonly': True}, + 'lease_status': {'readonly': True}, + 'lease_state': {'readonly': True}, + 'lease_duration': {'readonly': True}, + 'immutability_policy': {'readonly': True}, + 'legal_hold': {'readonly': True}, + 'has_legal_hold': {'readonly': True}, + 'has_immutability_policy': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'version': {'key': 'properties.version', 'type': 'str'}, + 'deleted': {'key': 'properties.deleted', 'type': 'bool'}, + 'deleted_time': {'key': 'properties.deletedTime', 'type': 'iso-8601'}, + 'remaining_retention_days': {'key': 'properties.remainingRetentionDays', 'type': 'int'}, + 'default_encryption_scope': {'key': 'properties.defaultEncryptionScope', 'type': 'str'}, + 'deny_encryption_scope_override': {'key': 'properties.denyEncryptionScopeOverride', 'type': 'bool'}, + 'public_access': {'key': 'properties.publicAccess', 'type': 'str'}, + 'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'iso-8601'}, + 'lease_status': {'key': 'properties.leaseStatus', 'type': 'str'}, + 'lease_state': {'key': 'properties.leaseState', 'type': 'str'}, + 'lease_duration': {'key': 'properties.leaseDuration', 'type': 'str'}, + 'metadata': {'key': 'properties.metadata', 'type': '{str}'}, + 'immutability_policy': {'key': 'properties.immutabilityPolicy', 'type': 'ImmutabilityPolicyProperties'}, + 'legal_hold': {'key': 'properties.legalHold', 'type': 'LegalHoldProperties'}, + 'has_legal_hold': {'key': 'properties.hasLegalHold', 'type': 'bool'}, + 'has_immutability_policy': {'key': 'properties.hasImmutabilityPolicy', 'type': 'bool'}, + 'immutable_storage_with_versioning': {'key': 'properties.immutableStorageWithVersioning', 'type': 'ImmutableStorageWithVersioning'}, + 'enable_nfs_v3_root_squash': {'key': 'properties.enableNfsV3RootSquash', 'type': 'bool'}, + 'enable_nfs_v3_all_squash': {'key': 'properties.enableNfsV3AllSquash', 'type': 'bool'}, + } + + def __init__( + self, + *, + default_encryption_scope: Optional[str] = None, + deny_encryption_scope_override: Optional[bool] = None, + public_access: Optional[Union[str, "PublicAccess"]] = None, + metadata: Optional[Dict[str, str]] = None, + immutable_storage_with_versioning: Optional["ImmutableStorageWithVersioning"] = None, + enable_nfs_v3_root_squash: Optional[bool] = None, + enable_nfs_v3_all_squash: Optional[bool] = None, + **kwargs + ): + super(ListContainerItem, self).__init__(**kwargs) + self.version = None + self.deleted = None + self.deleted_time = None + self.remaining_retention_days = None + self.default_encryption_scope = default_encryption_scope + self.deny_encryption_scope_override = deny_encryption_scope_override + self.public_access = public_access + self.last_modified_time = None + self.lease_status = None + self.lease_state = None + self.lease_duration = None + self.metadata = metadata + self.immutability_policy = None + self.legal_hold = None + self.has_legal_hold = None + self.has_immutability_policy = None + self.immutable_storage_with_versioning = immutable_storage_with_versioning + 
self.enable_nfs_v3_root_squash = enable_nfs_v3_root_squash + self.enable_nfs_v3_all_squash = enable_nfs_v3_all_squash + + +class ListContainerItems(msrest.serialization.Model): + """Response schema. Contains list of blobs returned, and if paging is requested or required, a URL to next page of containers. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: List of blobs containers returned. + :vartype value: list[~azure.mgmt.storage.v2021_06_01.models.ListContainerItem] + :ivar next_link: Request URL that can be used to query next page of containers. Returned when + total number of requested containers exceed maximum page size. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[ListContainerItem]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ListContainerItems, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class ListQueue(Resource): + """ListQueue. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :param metadata: A name-value pair that represents queue metadata. + :type metadata: dict[str, str] + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'metadata': {'key': 'properties.metadata', 'type': '{str}'}, + } + + def __init__( + self, + *, + metadata: Optional[Dict[str, str]] = None, + **kwargs + ): + super(ListQueue, self).__init__(**kwargs) + self.metadata = metadata + + +class ListQueueResource(msrest.serialization.Model): + """Response schema. Contains list of queues returned. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: List of queues returned. + :vartype value: list[~azure.mgmt.storage.v2021_06_01.models.ListQueue] + :ivar next_link: Request URL that can be used to list next page of queues. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[ListQueue]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ListQueueResource, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class ListQueueServices(msrest.serialization.Model): + """ListQueueServices. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: List of queue services returned. 
+ :vartype value: list[~azure.mgmt.storage.v2021_06_01.models.QueueServiceProperties] + """ + + _validation = { + 'value': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[QueueServiceProperties]'}, + } + + def __init__( + self, + **kwargs + ): + super(ListQueueServices, self).__init__(**kwargs) + self.value = None + + +class ListServiceSasResponse(msrest.serialization.Model): + """The List service SAS credentials operation response. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar service_sas_token: List service SAS credentials of specific resource. + :vartype service_sas_token: str + """ + + _validation = { + 'service_sas_token': {'readonly': True}, + } + + _attribute_map = { + 'service_sas_token': {'key': 'serviceSasToken', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ListServiceSasResponse, self).__init__(**kwargs) + self.service_sas_token = None + + +class ListTableResource(msrest.serialization.Model): + """Response schema. Contains list of tables returned. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: List of tables returned. + :vartype value: list[~azure.mgmt.storage.v2021_06_01.models.Table] + :ivar next_link: Request URL that can be used to query next page of tables. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Table]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ListTableResource, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class ListTableServices(msrest.serialization.Model): + """ListTableServices. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: List of table services returned. + :vartype value: list[~azure.mgmt.storage.v2021_06_01.models.TableServiceProperties] + """ + + _validation = { + 'value': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[TableServiceProperties]'}, + } + + def __init__( + self, + **kwargs + ): + super(ListTableServices, self).__init__(**kwargs) + self.value = None + + +class ManagementPolicy(Resource): + """The Get Storage Account ManagementPolicies operation response. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar last_modified_time: Returns the date and time the ManagementPolicies was last modified. + :vartype last_modified_time: ~datetime.datetime + :param policy: The Storage Account ManagementPolicy, in JSON format. See more details in: + https://docs.microsoft.com/en-us/azure/storage/common/storage-lifecycle-managment-concepts. 
+ :type policy: ~azure.mgmt.storage.v2021_06_01.models.ManagementPolicySchema + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'last_modified_time': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'iso-8601'}, + 'policy': {'key': 'properties.policy', 'type': 'ManagementPolicySchema'}, + } + + def __init__( + self, + *, + policy: Optional["ManagementPolicySchema"] = None, + **kwargs + ): + super(ManagementPolicy, self).__init__(**kwargs) + self.last_modified_time = None + self.policy = policy + + +class ManagementPolicyAction(msrest.serialization.Model): + """Actions are applied to the filtered blobs when the execution condition is met. + + :param base_blob: The management policy action for base blob. + :type base_blob: ~azure.mgmt.storage.v2021_06_01.models.ManagementPolicyBaseBlob + :param snapshot: The management policy action for snapshot. + :type snapshot: ~azure.mgmt.storage.v2021_06_01.models.ManagementPolicySnapShot + :param version: The management policy action for version. + :type version: ~azure.mgmt.storage.v2021_06_01.models.ManagementPolicyVersion + """ + + _attribute_map = { + 'base_blob': {'key': 'baseBlob', 'type': 'ManagementPolicyBaseBlob'}, + 'snapshot': {'key': 'snapshot', 'type': 'ManagementPolicySnapShot'}, + 'version': {'key': 'version', 'type': 'ManagementPolicyVersion'}, + } + + def __init__( + self, + *, + base_blob: Optional["ManagementPolicyBaseBlob"] = None, + snapshot: Optional["ManagementPolicySnapShot"] = None, + version: Optional["ManagementPolicyVersion"] = None, + **kwargs + ): + super(ManagementPolicyAction, self).__init__(**kwargs) + self.base_blob = base_blob + self.snapshot = snapshot + self.version = version + + +class ManagementPolicyBaseBlob(msrest.serialization.Model): + """Management policy action for base blob. + + :param tier_to_cool: The function to tier blobs to cool storage. Support blobs currently at Hot + tier. + :type tier_to_cool: ~azure.mgmt.storage.v2021_06_01.models.DateAfterModification + :param tier_to_archive: The function to tier blobs to archive storage. Support blobs currently + at Hot or Cool tier. + :type tier_to_archive: ~azure.mgmt.storage.v2021_06_01.models.DateAfterModification + :param delete: The function to delete the blob. + :type delete: ~azure.mgmt.storage.v2021_06_01.models.DateAfterModification + :param enable_auto_tier_to_hot_from_cool: This property enables auto tiering of a blob from + cool to hot on a blob access. This property requires + tierToCool.daysAfterLastAccessTimeGreaterThan. 
+ :type enable_auto_tier_to_hot_from_cool: bool + """ + + _attribute_map = { + 'tier_to_cool': {'key': 'tierToCool', 'type': 'DateAfterModification'}, + 'tier_to_archive': {'key': 'tierToArchive', 'type': 'DateAfterModification'}, + 'delete': {'key': 'delete', 'type': 'DateAfterModification'}, + 'enable_auto_tier_to_hot_from_cool': {'key': 'enableAutoTierToHotFromCool', 'type': 'bool'}, + } + + def __init__( + self, + *, + tier_to_cool: Optional["DateAfterModification"] = None, + tier_to_archive: Optional["DateAfterModification"] = None, + delete: Optional["DateAfterModification"] = None, + enable_auto_tier_to_hot_from_cool: Optional[bool] = None, + **kwargs + ): + super(ManagementPolicyBaseBlob, self).__init__(**kwargs) + self.tier_to_cool = tier_to_cool + self.tier_to_archive = tier_to_archive + self.delete = delete + self.enable_auto_tier_to_hot_from_cool = enable_auto_tier_to_hot_from_cool + + +class ManagementPolicyDefinition(msrest.serialization.Model): + """An object that defines the Lifecycle rule. Each definition is made up with a filters set and an actions set. + + All required parameters must be populated in order to send to Azure. + + :param actions: Required. An object that defines the action set. + :type actions: ~azure.mgmt.storage.v2021_06_01.models.ManagementPolicyAction + :param filters: An object that defines the filter set. + :type filters: ~azure.mgmt.storage.v2021_06_01.models.ManagementPolicyFilter + """ + + _validation = { + 'actions': {'required': True}, + } + + _attribute_map = { + 'actions': {'key': 'actions', 'type': 'ManagementPolicyAction'}, + 'filters': {'key': 'filters', 'type': 'ManagementPolicyFilter'}, + } + + def __init__( + self, + *, + actions: "ManagementPolicyAction", + filters: Optional["ManagementPolicyFilter"] = None, + **kwargs + ): + super(ManagementPolicyDefinition, self).__init__(**kwargs) + self.actions = actions + self.filters = filters + + +class ManagementPolicyFilter(msrest.serialization.Model): + """Filters limit rule actions to a subset of blobs within the storage account. If multiple filters are defined, a logical AND is performed on all filters. + + All required parameters must be populated in order to send to Azure. + + :param prefix_match: An array of strings for prefixes to be match. + :type prefix_match: list[str] + :param blob_types: Required. An array of predefined enum values. Currently blockBlob supports + all tiering and delete actions. Only delete actions are supported for appendBlob. + :type blob_types: list[str] + :param blob_index_match: An array of blob index tag based filters, there can be at most 10 tag + filters. + :type blob_index_match: list[~azure.mgmt.storage.v2021_06_01.models.TagFilter] + """ + + _validation = { + 'blob_types': {'required': True}, + } + + _attribute_map = { + 'prefix_match': {'key': 'prefixMatch', 'type': '[str]'}, + 'blob_types': {'key': 'blobTypes', 'type': '[str]'}, + 'blob_index_match': {'key': 'blobIndexMatch', 'type': '[TagFilter]'}, + } + + def __init__( + self, + *, + blob_types: List[str], + prefix_match: Optional[List[str]] = None, + blob_index_match: Optional[List["TagFilter"]] = None, + **kwargs + ): + super(ManagementPolicyFilter, self).__init__(**kwargs) + self.prefix_match = prefix_match + self.blob_types = blob_types + self.blob_index_match = blob_index_match + + +class ManagementPolicyRule(msrest.serialization.Model): + """An object that wraps the Lifecycle rule. Each rule is uniquely defined by name. + + All required parameters must be populated in order to send to Azure. 
+ + :param enabled: Rule is enabled if set to true. + :type enabled: bool + :param name: Required. A rule name can contain any combination of alpha numeric characters. + Rule name is case-sensitive. It must be unique within a policy. + :type name: str + :param type: Required. The valid value is Lifecycle. Possible values include: "Lifecycle". + :type type: str or ~azure.mgmt.storage.v2021_06_01.models.RuleType + :param definition: Required. An object that defines the Lifecycle rule. + :type definition: ~azure.mgmt.storage.v2021_06_01.models.ManagementPolicyDefinition + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'definition': {'required': True}, + } + + _attribute_map = { + 'enabled': {'key': 'enabled', 'type': 'bool'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'definition': {'key': 'definition', 'type': 'ManagementPolicyDefinition'}, + } + + def __init__( + self, + *, + name: str, + type: Union[str, "RuleType"], + definition: "ManagementPolicyDefinition", + enabled: Optional[bool] = None, + **kwargs + ): + super(ManagementPolicyRule, self).__init__(**kwargs) + self.enabled = enabled + self.name = name + self.type = type + self.definition = definition + + +class ManagementPolicySchema(msrest.serialization.Model): + """The Storage Account ManagementPolicies Rules. See more details in: https://docs.microsoft.com/en-us/azure/storage/common/storage-lifecycle-managment-concepts. + + All required parameters must be populated in order to send to Azure. + + :param rules: Required. The Storage Account ManagementPolicies Rules. See more details in: + https://docs.microsoft.com/en-us/azure/storage/common/storage-lifecycle-managment-concepts. + :type rules: list[~azure.mgmt.storage.v2021_06_01.models.ManagementPolicyRule] + """ + + _validation = { + 'rules': {'required': True}, + } + + _attribute_map = { + 'rules': {'key': 'rules', 'type': '[ManagementPolicyRule]'}, + } + + def __init__( + self, + *, + rules: List["ManagementPolicyRule"], + **kwargs + ): + super(ManagementPolicySchema, self).__init__(**kwargs) + self.rules = rules + + +class ManagementPolicySnapShot(msrest.serialization.Model): + """Management policy action for snapshot. + + :param tier_to_cool: The function to tier blob snapshot to cool storage. Support blob snapshot + currently at Hot tier. + :type tier_to_cool: ~azure.mgmt.storage.v2021_06_01.models.DateAfterCreation + :param tier_to_archive: The function to tier blob snapshot to archive storage. Support blob + snapshot currently at Hot or Cool tier. + :type tier_to_archive: ~azure.mgmt.storage.v2021_06_01.models.DateAfterCreation + :param delete: The function to delete the blob snapshot. + :type delete: ~azure.mgmt.storage.v2021_06_01.models.DateAfterCreation + """ + + _attribute_map = { + 'tier_to_cool': {'key': 'tierToCool', 'type': 'DateAfterCreation'}, + 'tier_to_archive': {'key': 'tierToArchive', 'type': 'DateAfterCreation'}, + 'delete': {'key': 'delete', 'type': 'DateAfterCreation'}, + } + + def __init__( + self, + *, + tier_to_cool: Optional["DateAfterCreation"] = None, + tier_to_archive: Optional["DateAfterCreation"] = None, + delete: Optional["DateAfterCreation"] = None, + **kwargs + ): + super(ManagementPolicySnapShot, self).__init__(**kwargs) + self.tier_to_cool = tier_to_cool + self.tier_to_archive = tier_to_archive + self.delete = delete + + +class ManagementPolicyVersion(msrest.serialization.Model): + """Management policy action for blob version. 
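Because the lifecycle-management models above compose into a fairly deep tree, a hedged usage sketch may help. It assumes the `DateAfterModification` model defined earlier in this module accepts a `days_after_modification_greater_than` keyword, which is not visible in this hunk; everything else mirrors the generated signatures above.

```python
from azure.mgmt.storage.v2021_06_01.models import (
    DateAfterModification,
    ManagementPolicyAction,
    ManagementPolicyBaseBlob,
    ManagementPolicyDefinition,
    ManagementPolicyFilter,
    ManagementPolicyRule,
    ManagementPolicySchema,
)

# Delete block blobs under "logs/" 365 days after their last modification.
rule = ManagementPolicyRule(
    enabled=True,
    name="expire-old-logs",
    type="Lifecycle",  # the only valid RuleType value per the docstring
    definition=ManagementPolicyDefinition(
        actions=ManagementPolicyAction(
            base_blob=ManagementPolicyBaseBlob(
                delete=DateAfterModification(days_after_modification_greater_than=365),
            ),
        ),
        filters=ManagementPolicyFilter(blob_types=["blockBlob"], prefix_match=["logs/"]),
    ),
)

policy = ManagementPolicySchema(rules=[rule])
```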
+ + :param tier_to_cool: The function to tier blob version to cool storage. Support blob version + currently at Hot tier. + :type tier_to_cool: ~azure.mgmt.storage.v2021_06_01.models.DateAfterCreation + :param tier_to_archive: The function to tier blob version to archive storage. Support blob + version currently at Hot or Cool tier. + :type tier_to_archive: ~azure.mgmt.storage.v2021_06_01.models.DateAfterCreation + :param delete: The function to delete the blob version. + :type delete: ~azure.mgmt.storage.v2021_06_01.models.DateAfterCreation + """ + + _attribute_map = { + 'tier_to_cool': {'key': 'tierToCool', 'type': 'DateAfterCreation'}, + 'tier_to_archive': {'key': 'tierToArchive', 'type': 'DateAfterCreation'}, + 'delete': {'key': 'delete', 'type': 'DateAfterCreation'}, + } + + def __init__( + self, + *, + tier_to_cool: Optional["DateAfterCreation"] = None, + tier_to_archive: Optional["DateAfterCreation"] = None, + delete: Optional["DateAfterCreation"] = None, + **kwargs + ): + super(ManagementPolicyVersion, self).__init__(**kwargs) + self.tier_to_cool = tier_to_cool + self.tier_to_archive = tier_to_archive + self.delete = delete + + +class MetricSpecification(msrest.serialization.Model): + """Metric specification of operation. + + :param name: Name of metric specification. + :type name: str + :param display_name: Display name of metric specification. + :type display_name: str + :param display_description: Display description of metric specification. + :type display_description: str + :param unit: Unit could be Bytes or Count. + :type unit: str + :param dimensions: Dimensions of blobs, including blob type and access tier. + :type dimensions: list[~azure.mgmt.storage.v2021_06_01.models.Dimension] + :param aggregation_type: Aggregation type could be Average. + :type aggregation_type: str + :param fill_gap_with_zero: The property to decide fill gap with zero or not. + :type fill_gap_with_zero: bool + :param category: The category this metric specification belong to, could be Capacity. + :type category: str + :param resource_id_dimension_name_override: Account Resource Id. 
+ :type resource_id_dimension_name_override: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'display_description': {'key': 'displayDescription', 'type': 'str'}, + 'unit': {'key': 'unit', 'type': 'str'}, + 'dimensions': {'key': 'dimensions', 'type': '[Dimension]'}, + 'aggregation_type': {'key': 'aggregationType', 'type': 'str'}, + 'fill_gap_with_zero': {'key': 'fillGapWithZero', 'type': 'bool'}, + 'category': {'key': 'category', 'type': 'str'}, + 'resource_id_dimension_name_override': {'key': 'resourceIdDimensionNameOverride', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + display_name: Optional[str] = None, + display_description: Optional[str] = None, + unit: Optional[str] = None, + dimensions: Optional[List["Dimension"]] = None, + aggregation_type: Optional[str] = None, + fill_gap_with_zero: Optional[bool] = None, + category: Optional[str] = None, + resource_id_dimension_name_override: Optional[str] = None, + **kwargs + ): + super(MetricSpecification, self).__init__(**kwargs) + self.name = name + self.display_name = display_name + self.display_description = display_description + self.unit = unit + self.dimensions = dimensions + self.aggregation_type = aggregation_type + self.fill_gap_with_zero = fill_gap_with_zero + self.category = category + self.resource_id_dimension_name_override = resource_id_dimension_name_override + + +class Multichannel(msrest.serialization.Model): + """Multichannel setting. Applies to Premium FileStorage only. + + :param enabled: Indicates whether multichannel is enabled. + :type enabled: bool + """ + + _attribute_map = { + 'enabled': {'key': 'enabled', 'type': 'bool'}, + } + + def __init__( + self, + *, + enabled: Optional[bool] = None, + **kwargs + ): + super(Multichannel, self).__init__(**kwargs) + self.enabled = enabled + + +class NetworkRuleSet(msrest.serialization.Model): + """Network rule set. + + All required parameters must be populated in order to send to Azure. + + :param bypass: Specifies whether traffic is bypassed for Logging/Metrics/AzureServices. + Possible values are any combination of Logging|Metrics|AzureServices (For example, "Logging, + Metrics"), or None to bypass none of those traffics. Possible values include: "None", + "Logging", "Metrics", "AzureServices". Default value: "AzureServices". + :type bypass: str or ~azure.mgmt.storage.v2021_06_01.models.Bypass + :param resource_access_rules: Sets the resource access rules. + :type resource_access_rules: list[~azure.mgmt.storage.v2021_06_01.models.ResourceAccessRule] + :param virtual_network_rules: Sets the virtual network rules. + :type virtual_network_rules: list[~azure.mgmt.storage.v2021_06_01.models.VirtualNetworkRule] + :param ip_rules: Sets the IP ACL rules. + :type ip_rules: list[~azure.mgmt.storage.v2021_06_01.models.IPRule] + :param default_action: Required. Specifies the default action of allow or deny when no other + rules match. Possible values include: "Allow", "Deny". Default value: "Allow". 
+ :type default_action: str or ~azure.mgmt.storage.v2021_06_01.models.DefaultAction + """ + + _validation = { + 'default_action': {'required': True}, + } + + _attribute_map = { + 'bypass': {'key': 'bypass', 'type': 'str'}, + 'resource_access_rules': {'key': 'resourceAccessRules', 'type': '[ResourceAccessRule]'}, + 'virtual_network_rules': {'key': 'virtualNetworkRules', 'type': '[VirtualNetworkRule]'}, + 'ip_rules': {'key': 'ipRules', 'type': '[IPRule]'}, + 'default_action': {'key': 'defaultAction', 'type': 'str'}, + } + + def __init__( + self, + *, + default_action: Union[str, "DefaultAction"] = "Allow", + bypass: Optional[Union[str, "Bypass"]] = "AzureServices", + resource_access_rules: Optional[List["ResourceAccessRule"]] = None, + virtual_network_rules: Optional[List["VirtualNetworkRule"]] = None, + ip_rules: Optional[List["IPRule"]] = None, + **kwargs + ): + super(NetworkRuleSet, self).__init__(**kwargs) + self.bypass = bypass + self.resource_access_rules = resource_access_rules + self.virtual_network_rules = virtual_network_rules + self.ip_rules = ip_rules + self.default_action = default_action + + +class ObjectReplicationPolicies(msrest.serialization.Model): + """List storage account object replication policies. + + :param value: The replication policy between two storage accounts. + :type value: list[~azure.mgmt.storage.v2021_06_01.models.ObjectReplicationPolicy] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[ObjectReplicationPolicy]'}, + } + + def __init__( + self, + *, + value: Optional[List["ObjectReplicationPolicy"]] = None, + **kwargs + ): + super(ObjectReplicationPolicies, self).__init__(**kwargs) + self.value = value + + +class ObjectReplicationPolicy(Resource): + """The replication policy between two storage accounts. Multiple rules can be defined in one policy. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar policy_id: A unique id for object replication policy. + :vartype policy_id: str + :ivar enabled_time: Indicates when the policy is enabled on the source account. + :vartype enabled_time: ~datetime.datetime + :param source_account: Required. Source account name. It should be full resource id if + allowCrossTenantReplication set to false. + :type source_account: str + :param destination_account: Required. Destination account name. It should be full resource id + if allowCrossTenantReplication set to false. + :type destination_account: str + :param rules: The storage account object replication rules. 
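For orientation only, a brief sketch of wiring the `IPRule` model defined earlier in this file into the `NetworkRuleSet` above; the `bypass` and `default_action` values shown are taken from the generated signature and its documented defaults.

```python
from azure.mgmt.storage.v2021_06_01.models import IPRule, NetworkRuleSet

rule_set = NetworkRuleSet(
    default_action="Deny",   # deny traffic that no explicit rule allows
    bypass="AzureServices",  # keep trusted Azure services allowed (the default)
    ip_rules=[IPRule(ip_address_or_range="203.0.113.0/24", action="Allow")],
)
```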
+ :type rules: list[~azure.mgmt.storage.v2021_06_01.models.ObjectReplicationPolicyRule] + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'policy_id': {'readonly': True}, + 'enabled_time': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'policy_id': {'key': 'properties.policyId', 'type': 'str'}, + 'enabled_time': {'key': 'properties.enabledTime', 'type': 'iso-8601'}, + 'source_account': {'key': 'properties.sourceAccount', 'type': 'str'}, + 'destination_account': {'key': 'properties.destinationAccount', 'type': 'str'}, + 'rules': {'key': 'properties.rules', 'type': '[ObjectReplicationPolicyRule]'}, + } + + def __init__( + self, + *, + source_account: Optional[str] = None, + destination_account: Optional[str] = None, + rules: Optional[List["ObjectReplicationPolicyRule"]] = None, + **kwargs + ): + super(ObjectReplicationPolicy, self).__init__(**kwargs) + self.policy_id = None + self.enabled_time = None + self.source_account = source_account + self.destination_account = destination_account + self.rules = rules + + +class ObjectReplicationPolicyFilter(msrest.serialization.Model): + """Filters limit replication to a subset of blobs within the storage account. A logical OR is performed on values in the filter. If multiple filters are defined, a logical AND is performed on all filters. + + :param prefix_match: Optional. Filters the results to replicate only blobs whose names begin + with the specified prefix. + :type prefix_match: list[str] + :param min_creation_time: Blobs created after the time will be replicated to the destination. + It must be in datetime format 'yyyy-MM-ddTHH:mm:ssZ'. Example: 2020-02-19T16:05:00Z. + :type min_creation_time: str + """ + + _attribute_map = { + 'prefix_match': {'key': 'prefixMatch', 'type': '[str]'}, + 'min_creation_time': {'key': 'minCreationTime', 'type': 'str'}, + } + + def __init__( + self, + *, + prefix_match: Optional[List[str]] = None, + min_creation_time: Optional[str] = None, + **kwargs + ): + super(ObjectReplicationPolicyFilter, self).__init__(**kwargs) + self.prefix_match = prefix_match + self.min_creation_time = min_creation_time + + +class ObjectReplicationPolicyRule(msrest.serialization.Model): + """The replication policy rule between two containers. + + All required parameters must be populated in order to send to Azure. + + :param rule_id: Rule Id is auto-generated for each new rule on destination account. It is + required for put policy on source account. + :type rule_id: str + :param source_container: Required. Required. Source container name. + :type source_container: str + :param destination_container: Required. Required. Destination container name. + :type destination_container: str + :param filters: Optional. An object that defines the filter set. 
+ :type filters: ~azure.mgmt.storage.v2021_06_01.models.ObjectReplicationPolicyFilter + """ + + _validation = { + 'source_container': {'required': True}, + 'destination_container': {'required': True}, + } + + _attribute_map = { + 'rule_id': {'key': 'ruleId', 'type': 'str'}, + 'source_container': {'key': 'sourceContainer', 'type': 'str'}, + 'destination_container': {'key': 'destinationContainer', 'type': 'str'}, + 'filters': {'key': 'filters', 'type': 'ObjectReplicationPolicyFilter'}, + } + + def __init__( + self, + *, + source_container: str, + destination_container: str, + rule_id: Optional[str] = None, + filters: Optional["ObjectReplicationPolicyFilter"] = None, + **kwargs + ): + super(ObjectReplicationPolicyRule, self).__init__(**kwargs) + self.rule_id = rule_id + self.source_container = source_container + self.destination_container = destination_container + self.filters = filters + + +class Operation(msrest.serialization.Model): + """Storage REST API operation definition. + + :param name: Operation name: {provider}/{resource}/{operation}. + :type name: str + :param display: Display metadata associated with the operation. + :type display: ~azure.mgmt.storage.v2021_06_01.models.OperationDisplay + :param origin: The origin of operations. + :type origin: str + :param service_specification: One property of operation, include metric specifications. + :type service_specification: ~azure.mgmt.storage.v2021_06_01.models.ServiceSpecification + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display': {'key': 'display', 'type': 'OperationDisplay'}, + 'origin': {'key': 'origin', 'type': 'str'}, + 'service_specification': {'key': 'properties.serviceSpecification', 'type': 'ServiceSpecification'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + display: Optional["OperationDisplay"] = None, + origin: Optional[str] = None, + service_specification: Optional["ServiceSpecification"] = None, + **kwargs + ): + super(Operation, self).__init__(**kwargs) + self.name = name + self.display = display + self.origin = origin + self.service_specification = service_specification + + +class OperationDisplay(msrest.serialization.Model): + """Display metadata associated with the operation. + + :param provider: Service provider: Microsoft Storage. + :type provider: str + :param resource: Resource on which the operation is performed etc. + :type resource: str + :param operation: Type of operation: get, read, delete, etc. + :type operation: str + :param description: Description of the operation. + :type description: str + """ + + _attribute_map = { + 'provider': {'key': 'provider', 'type': 'str'}, + 'resource': {'key': 'resource', 'type': 'str'}, + 'operation': {'key': 'operation', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__( + self, + *, + provider: Optional[str] = None, + resource: Optional[str] = None, + operation: Optional[str] = None, + description: Optional[str] = None, + **kwargs + ): + super(OperationDisplay, self).__init__(**kwargs) + self.provider = provider + self.resource = resource + self.operation = operation + self.description = description + + +class OperationListResult(msrest.serialization.Model): + """Result of the request to list Storage operations. It contains a list of operations and a URL link to get the next set of results. + + :param value: List of Storage operations supported by the Storage resource provider. 
+ :type value: list[~azure.mgmt.storage.v2021_06_01.models.Operation] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Operation]'}, + } + + def __init__( + self, + *, + value: Optional[List["Operation"]] = None, + **kwargs + ): + super(OperationListResult, self).__init__(**kwargs) + self.value = value + + +class PrivateEndpoint(msrest.serialization.Model): + """The Private Endpoint resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: The ARM identifier for Private Endpoint. + :vartype id: str + """ + + _validation = { + 'id': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateEndpoint, self).__init__(**kwargs) + self.id = None + + +class PrivateEndpointConnection(Resource): + """The Private Endpoint Connection resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :param private_endpoint: The resource of private end point. + :type private_endpoint: ~azure.mgmt.storage.v2021_06_01.models.PrivateEndpoint + :param private_link_service_connection_state: A collection of information about the state of + the connection between service consumer and provider. + :type private_link_service_connection_state: + ~azure.mgmt.storage.v2021_06_01.models.PrivateLinkServiceConnectionState + :ivar provisioning_state: The provisioning state of the private endpoint connection resource. + Possible values include: "Succeeded", "Creating", "Deleting", "Failed". + :vartype provisioning_state: str or + ~azure.mgmt.storage.v2021_06_01.models.PrivateEndpointConnectionProvisioningState + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'}, + 'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + } + + def __init__( + self, + *, + private_endpoint: Optional["PrivateEndpoint"] = None, + private_link_service_connection_state: Optional["PrivateLinkServiceConnectionState"] = None, + **kwargs + ): + super(PrivateEndpointConnection, self).__init__(**kwargs) + self.private_endpoint = private_endpoint + self.private_link_service_connection_state = private_link_service_connection_state + self.provisioning_state = None + + +class PrivateEndpointConnectionListResult(msrest.serialization.Model): + """List of private endpoint connection associated with the specified storage account. + + :param value: Array of private endpoint connections. 
+ :type value: list[~azure.mgmt.storage.v2021_06_01.models.PrivateEndpointConnection] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PrivateEndpointConnection]'}, + } + + def __init__( + self, + *, + value: Optional[List["PrivateEndpointConnection"]] = None, + **kwargs + ): + super(PrivateEndpointConnectionListResult, self).__init__(**kwargs) + self.value = value + + +class PrivateLinkResource(Resource): + """A private link resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar group_id: The private link resource group id. + :vartype group_id: str + :ivar required_members: The private link resource required member names. + :vartype required_members: list[str] + :param required_zone_names: The private link resource Private link DNS zone name. + :type required_zone_names: list[str] + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'group_id': {'readonly': True}, + 'required_members': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'group_id': {'key': 'properties.groupId', 'type': 'str'}, + 'required_members': {'key': 'properties.requiredMembers', 'type': '[str]'}, + 'required_zone_names': {'key': 'properties.requiredZoneNames', 'type': '[str]'}, + } + + def __init__( + self, + *, + required_zone_names: Optional[List[str]] = None, + **kwargs + ): + super(PrivateLinkResource, self).__init__(**kwargs) + self.group_id = None + self.required_members = None + self.required_zone_names = required_zone_names + + +class PrivateLinkResourceListResult(msrest.serialization.Model): + """A list of private link resources. + + :param value: Array of private link resources. + :type value: list[~azure.mgmt.storage.v2021_06_01.models.PrivateLinkResource] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PrivateLinkResource]'}, + } + + def __init__( + self, + *, + value: Optional[List["PrivateLinkResource"]] = None, + **kwargs + ): + super(PrivateLinkResourceListResult, self).__init__(**kwargs) + self.value = value + + +class PrivateLinkServiceConnectionState(msrest.serialization.Model): + """A collection of information about the state of the connection between service consumer and provider. + + :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner + of the service. Possible values include: "Pending", "Approved", "Rejected". + :type status: str or + ~azure.mgmt.storage.v2021_06_01.models.PrivateEndpointServiceConnectionStatus + :param description: The reason for approval/rejection of the connection. + :type description: str + :param action_required: A message indicating if changes on the service provider require any + updates on the consumer. 
+ :type action_required: str
+ """
+
+ _attribute_map = {
+ 'status': {'key': 'status', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'action_required': {'key': 'actionRequired', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ status: Optional[Union[str, "PrivateEndpointServiceConnectionStatus"]] = None,
+ description: Optional[str] = None,
+ action_required: Optional[str] = None,
+ **kwargs
+ ):
+ super(PrivateLinkServiceConnectionState, self).__init__(**kwargs)
+ self.status = status
+ self.description = description
+ self.action_required = action_required
+
+
+class ProtectedAppendWritesHistory(msrest.serialization.Model):
+ """Protected append writes history setting for the blob container with Legal holds.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param allow_protected_append_writes_all: When enabled, new blocks can be written to both
+ 'Append and Block Blobs' while maintaining legal hold protection and compliance. Only new blocks
+ can be added and any existing blocks cannot be modified or deleted.
+ :type allow_protected_append_writes_all: bool
+ :ivar timestamp: Returns the date and time the tag was added.
+ :vartype timestamp: ~datetime.datetime
+ """
+
+ _validation = {
+ 'timestamp': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'allow_protected_append_writes_all': {'key': 'allowProtectedAppendWritesAll', 'type': 'bool'},
+ 'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
+ }
+
+ def __init__(
+ self,
+ *,
+ allow_protected_append_writes_all: Optional[bool] = None,
+ **kwargs
+ ):
+ super(ProtectedAppendWritesHistory, self).__init__(**kwargs)
+ self.allow_protected_append_writes_all = allow_protected_append_writes_all
+ self.timestamp = None
+
+
+class ProtocolSettings(msrest.serialization.Model):
+ """Protocol settings for file service.
+
+ :param smb: Setting for SMB protocol.
+ :type smb: ~azure.mgmt.storage.v2021_06_01.models.SmbSetting
+ """
+
+ _attribute_map = {
+ 'smb': {'key': 'smb', 'type': 'SmbSetting'},
+ }
+
+ def __init__(
+ self,
+ *,
+ smb: Optional["SmbSetting"] = None,
+ **kwargs
+ ):
+ super(ProtocolSettings, self).__init__(**kwargs)
+ self.smb = smb
+
+
+class QueueServiceProperties(Resource):
+ """The properties of a storage account’s Queue service.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Fully qualified resource ID for the resource. Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ :param cors: Specifies CORS rules for the Queue service. You can include up to five CorsRule
+ elements in the request. If no CorsRule elements are included in the request body, all CORS
+ rules will be deleted, and CORS will be disabled for the Queue service.
+ :type cors: ~azure.mgmt.storage.v2021_06_01.models.CorsRules
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'cors': {'key': 'properties.cors', 'type': 'CorsRules'},
+ }
+
+ def __init__(
+ self,
+ *,
+ cors: Optional["CorsRules"] = None,
+ **kwargs
+ ):
+ super(QueueServiceProperties, self).__init__(**kwargs)
+ self.cors = cors
+
+
+class ResourceAccessRule(msrest.serialization.Model):
+ """Resource Access Rule.
+
+ :param tenant_id: Tenant Id.
+ :type tenant_id: str
+ :param resource_id: Resource Id.
+ :type resource_id: str
+ """
+
+ _attribute_map = {
+ 'tenant_id': {'key': 'tenantId', 'type': 'str'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ tenant_id: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ **kwargs
+ ):
+ super(ResourceAccessRule, self).__init__(**kwargs)
+ self.tenant_id = tenant_id
+ self.resource_id = resource_id
+
+
+class RestorePolicyProperties(msrest.serialization.Model):
+ """The blob service properties for blob restore policy.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param enabled: Required. Blob restore is enabled if set to true.
+ :type enabled: bool
+ :param days: how long this blob can be restored. It should be greater than zero and less than
+ DeleteRetentionPolicy.days.
+ :type days: int
+ :ivar last_enabled_time: Deprecated in favor of minRestoreTime property.
+ :vartype last_enabled_time: ~datetime.datetime
+ :ivar min_restore_time: Returns the minimum date and time that the restore can be started.
+ :vartype min_restore_time: ~datetime.datetime
+ """
+
+ _validation = {
+ 'enabled': {'required': True},
+ 'days': {'maximum': 365, 'minimum': 1},
+ 'last_enabled_time': {'readonly': True},
+ 'min_restore_time': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'enabled': {'key': 'enabled', 'type': 'bool'},
+ 'days': {'key': 'days', 'type': 'int'},
+ 'last_enabled_time': {'key': 'lastEnabledTime', 'type': 'iso-8601'},
+ 'min_restore_time': {'key': 'minRestoreTime', 'type': 'iso-8601'},
+ }
+
+ def __init__(
+ self,
+ *,
+ enabled: bool,
+ days: Optional[int] = None,
+ **kwargs
+ ):
+ super(RestorePolicyProperties, self).__init__(**kwargs)
+ self.enabled = enabled
+ self.days = days
+ self.last_enabled_time = None
+ self.min_restore_time = None
+
+
+class Restriction(msrest.serialization.Model):
+ """The restriction because of which SKU cannot be used.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar type: The type of restrictions. As of now only possible value for this is location.
+ :vartype type: str
+ :ivar values: The value of restrictions. If the restriction type is set to location, this would
+ be different locations where the SKU is restricted.
+ :vartype values: list[str]
+ :param reason_code: The reason for the restriction. As of now this can be "QuotaId" or
+ "NotAvailableForSubscription". Quota Id is set when the SKU has requiredQuotas parameter as the
+ subscription does not belong to that quota. The "NotAvailableForSubscription" is related to
+ capacity at DC. Possible values include: "QuotaId", "NotAvailableForSubscription".
+ :type reason_code: str or ~azure.mgmt.storage.v2021_06_01.models.ReasonCode + """ + + _validation = { + 'type': {'readonly': True}, + 'values': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'values': {'key': 'values', 'type': '[str]'}, + 'reason_code': {'key': 'reasonCode', 'type': 'str'}, + } + + def __init__( + self, + *, + reason_code: Optional[Union[str, "ReasonCode"]] = None, + **kwargs + ): + super(Restriction, self).__init__(**kwargs) + self.type = None + self.values = None + self.reason_code = reason_code + + +class RoutingPreference(msrest.serialization.Model): + """Routing preference defines the type of network, either microsoft or internet routing to be used to deliver the user data, the default option is microsoft routing. + + :param routing_choice: Routing Choice defines the kind of network routing opted by the user. + Possible values include: "MicrosoftRouting", "InternetRouting". + :type routing_choice: str or ~azure.mgmt.storage.v2021_06_01.models.RoutingChoice + :param publish_microsoft_endpoints: A boolean flag which indicates whether microsoft routing + storage endpoints are to be published. + :type publish_microsoft_endpoints: bool + :param publish_internet_endpoints: A boolean flag which indicates whether internet routing + storage endpoints are to be published. + :type publish_internet_endpoints: bool + """ + + _attribute_map = { + 'routing_choice': {'key': 'routingChoice', 'type': 'str'}, + 'publish_microsoft_endpoints': {'key': 'publishMicrosoftEndpoints', 'type': 'bool'}, + 'publish_internet_endpoints': {'key': 'publishInternetEndpoints', 'type': 'bool'}, + } + + def __init__( + self, + *, + routing_choice: Optional[Union[str, "RoutingChoice"]] = None, + publish_microsoft_endpoints: Optional[bool] = None, + publish_internet_endpoints: Optional[bool] = None, + **kwargs + ): + super(RoutingPreference, self).__init__(**kwargs) + self.routing_choice = routing_choice + self.publish_microsoft_endpoints = publish_microsoft_endpoints + self.publish_internet_endpoints = publish_internet_endpoints + + +class SasPolicy(msrest.serialization.Model): + """SasPolicy assigned to the storage account. + + All required parameters must be populated in order to send to Azure. + + :param sas_expiration_period: Required. The SAS expiration period, DD.HH:MM:SS. + :type sas_expiration_period: str + :param expiration_action: Required. The SAS expiration action. Can only be Log. Possible values + include: "Log". Default value: "Log". + :type expiration_action: str or ~azure.mgmt.storage.v2021_06_01.models.ExpirationAction + """ + + _validation = { + 'sas_expiration_period': {'required': True}, + 'expiration_action': {'required': True}, + } + + _attribute_map = { + 'sas_expiration_period': {'key': 'sasExpirationPeriod', 'type': 'str'}, + 'expiration_action': {'key': 'expirationAction', 'type': 'str'}, + } + + def __init__( + self, + *, + sas_expiration_period: str, + expiration_action: Union[str, "ExpirationAction"] = "Log", + **kwargs + ): + super(SasPolicy, self).__init__(**kwargs) + self.sas_expiration_period = sas_expiration_period + self.expiration_action = expiration_action + + +class ServiceSasParameters(msrest.serialization.Model): + """The parameters to list service SAS credentials of a specific resource. + + All required parameters must be populated in order to send to Azure. + + :param canonicalized_resource: Required. The canonical path to the signed resource. 
+ :type canonicalized_resource: str + :param resource: The signed services accessible with the service SAS. Possible values include: + Blob (b), Container (c), File (f), Share (s). Possible values include: "b", "c", "f", "s". + :type resource: str or ~azure.mgmt.storage.v2021_06_01.models.SignedResource + :param permissions: The signed permissions for the service SAS. Possible values include: Read + (r), Write (w), Delete (d), List (l), Add (a), Create (c), Update (u) and Process (p). Possible + values include: "r", "d", "w", "l", "a", "c", "u", "p". + :type permissions: str or ~azure.mgmt.storage.v2021_06_01.models.Permissions + :param ip_address_or_range: An IP address or a range of IP addresses from which to accept + requests. + :type ip_address_or_range: str + :param protocols: The protocol permitted for a request made with the account SAS. Possible + values include: "https,http", "https". + :type protocols: str or ~azure.mgmt.storage.v2021_06_01.models.HttpProtocol + :param shared_access_start_time: The time at which the SAS becomes valid. + :type shared_access_start_time: ~datetime.datetime + :param shared_access_expiry_time: The time at which the shared access signature becomes + invalid. + :type shared_access_expiry_time: ~datetime.datetime + :param identifier: A unique value up to 64 characters in length that correlates to an access + policy specified for the container, queue, or table. + :type identifier: str + :param partition_key_start: The start of partition key. + :type partition_key_start: str + :param partition_key_end: The end of partition key. + :type partition_key_end: str + :param row_key_start: The start of row key. + :type row_key_start: str + :param row_key_end: The end of row key. + :type row_key_end: str + :param key_to_sign: The key to sign the account SAS token with. + :type key_to_sign: str + :param cache_control: The response header override for cache control. + :type cache_control: str + :param content_disposition: The response header override for content disposition. + :type content_disposition: str + :param content_encoding: The response header override for content encoding. + :type content_encoding: str + :param content_language: The response header override for content language. + :type content_language: str + :param content_type: The response header override for content type. 
+ :type content_type: str + """ + + _validation = { + 'canonicalized_resource': {'required': True}, + 'identifier': {'max_length': 64, 'min_length': 0}, + } + + _attribute_map = { + 'canonicalized_resource': {'key': 'canonicalizedResource', 'type': 'str'}, + 'resource': {'key': 'signedResource', 'type': 'str'}, + 'permissions': {'key': 'signedPermission', 'type': 'str'}, + 'ip_address_or_range': {'key': 'signedIp', 'type': 'str'}, + 'protocols': {'key': 'signedProtocol', 'type': 'str'}, + 'shared_access_start_time': {'key': 'signedStart', 'type': 'iso-8601'}, + 'shared_access_expiry_time': {'key': 'signedExpiry', 'type': 'iso-8601'}, + 'identifier': {'key': 'signedIdentifier', 'type': 'str'}, + 'partition_key_start': {'key': 'startPk', 'type': 'str'}, + 'partition_key_end': {'key': 'endPk', 'type': 'str'}, + 'row_key_start': {'key': 'startRk', 'type': 'str'}, + 'row_key_end': {'key': 'endRk', 'type': 'str'}, + 'key_to_sign': {'key': 'keyToSign', 'type': 'str'}, + 'cache_control': {'key': 'rscc', 'type': 'str'}, + 'content_disposition': {'key': 'rscd', 'type': 'str'}, + 'content_encoding': {'key': 'rsce', 'type': 'str'}, + 'content_language': {'key': 'rscl', 'type': 'str'}, + 'content_type': {'key': 'rsct', 'type': 'str'}, + } + + def __init__( + self, + *, + canonicalized_resource: str, + resource: Optional[Union[str, "SignedResource"]] = None, + permissions: Optional[Union[str, "Permissions"]] = None, + ip_address_or_range: Optional[str] = None, + protocols: Optional[Union[str, "HttpProtocol"]] = None, + shared_access_start_time: Optional[datetime.datetime] = None, + shared_access_expiry_time: Optional[datetime.datetime] = None, + identifier: Optional[str] = None, + partition_key_start: Optional[str] = None, + partition_key_end: Optional[str] = None, + row_key_start: Optional[str] = None, + row_key_end: Optional[str] = None, + key_to_sign: Optional[str] = None, + cache_control: Optional[str] = None, + content_disposition: Optional[str] = None, + content_encoding: Optional[str] = None, + content_language: Optional[str] = None, + content_type: Optional[str] = None, + **kwargs + ): + super(ServiceSasParameters, self).__init__(**kwargs) + self.canonicalized_resource = canonicalized_resource + self.resource = resource + self.permissions = permissions + self.ip_address_or_range = ip_address_or_range + self.protocols = protocols + self.shared_access_start_time = shared_access_start_time + self.shared_access_expiry_time = shared_access_expiry_time + self.identifier = identifier + self.partition_key_start = partition_key_start + self.partition_key_end = partition_key_end + self.row_key_start = row_key_start + self.row_key_end = row_key_end + self.key_to_sign = key_to_sign + self.cache_control = cache_control + self.content_disposition = content_disposition + self.content_encoding = content_encoding + self.content_language = content_language + self.content_type = content_type + + +class ServiceSpecification(msrest.serialization.Model): + """One property of operation, include metric specifications. + + :param metric_specifications: Metric specifications of operation. 
+ :type metric_specifications: list[~azure.mgmt.storage.v2021_06_01.models.MetricSpecification] + """ + + _attribute_map = { + 'metric_specifications': {'key': 'metricSpecifications', 'type': '[MetricSpecification]'}, + } + + def __init__( + self, + *, + metric_specifications: Optional[List["MetricSpecification"]] = None, + **kwargs + ): + super(ServiceSpecification, self).__init__(**kwargs) + self.metric_specifications = metric_specifications + + +class SignedIdentifier(msrest.serialization.Model): + """SignedIdentifier. + + :param id: An unique identifier of the stored access policy. + :type id: str + :param access_policy: Access policy. + :type access_policy: ~azure.mgmt.storage.v2021_06_01.models.AccessPolicy + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'access_policy': {'key': 'accessPolicy', 'type': 'AccessPolicy'}, + } + + def __init__( + self, + *, + id: Optional[str] = None, + access_policy: Optional["AccessPolicy"] = None, + **kwargs + ): + super(SignedIdentifier, self).__init__(**kwargs) + self.id = id + self.access_policy = access_policy + + +class Sku(msrest.serialization.Model): + """The SKU of the storage account. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. The SKU name. Required for account creation; optional for update. Note + that in older versions, SKU name was called accountType. Possible values include: + "Standard_LRS", "Standard_GRS", "Standard_RAGRS", "Standard_ZRS", "Premium_LRS", "Premium_ZRS", + "Standard_GZRS", "Standard_RAGZRS". + :type name: str or ~azure.mgmt.storage.v2021_06_01.models.SkuName + :ivar tier: The SKU tier. This is based on the SKU name. Possible values include: "Standard", + "Premium". + :vartype tier: str or ~azure.mgmt.storage.v2021_06_01.models.SkuTier + """ + + _validation = { + 'name': {'required': True}, + 'tier': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'tier': {'key': 'tier', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Union[str, "SkuName"], + **kwargs + ): + super(Sku, self).__init__(**kwargs) + self.name = name + self.tier = None + + +class SKUCapability(msrest.serialization.Model): + """The capability information in the specified SKU, including file encryption, network ACLs, change notification, etc. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: The name of capability, The capability information in the specified SKU, including + file encryption, network ACLs, change notification, etc. + :vartype name: str + :ivar value: A string value to indicate states of given capability. Possibly 'true' or 'false'. + :vartype value: str + """ + + _validation = { + 'name': {'readonly': True}, + 'value': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SKUCapability, self).__init__(**kwargs) + self.name = None + self.value = None + + +class SkuInformation(msrest.serialization.Model): + """Storage SKU and its properties. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. The SKU name. Required for account creation; optional for update. 
Note + that in older versions, SKU name was called accountType. Possible values include: + "Standard_LRS", "Standard_GRS", "Standard_RAGRS", "Standard_ZRS", "Premium_LRS", "Premium_ZRS", + "Standard_GZRS", "Standard_RAGZRS". + :type name: str or ~azure.mgmt.storage.v2021_06_01.models.SkuName + :ivar tier: The SKU tier. This is based on the SKU name. Possible values include: "Standard", + "Premium". + :vartype tier: str or ~azure.mgmt.storage.v2021_06_01.models.SkuTier + :ivar resource_type: The type of the resource, usually it is 'storageAccounts'. + :vartype resource_type: str + :ivar kind: Indicates the type of storage account. Possible values include: "Storage", + "StorageV2", "BlobStorage", "FileStorage", "BlockBlobStorage". + :vartype kind: str or ~azure.mgmt.storage.v2021_06_01.models.Kind + :ivar locations: The set of locations that the SKU is available. This will be supported and + registered Azure Geo Regions (e.g. West US, East US, Southeast Asia, etc.). + :vartype locations: list[str] + :ivar capabilities: The capability information in the specified SKU, including file encryption, + network ACLs, change notification, etc. + :vartype capabilities: list[~azure.mgmt.storage.v2021_06_01.models.SKUCapability] + :param restrictions: The restrictions because of which SKU cannot be used. This is empty if + there are no restrictions. + :type restrictions: list[~azure.mgmt.storage.v2021_06_01.models.Restriction] + """ + + _validation = { + 'name': {'required': True}, + 'tier': {'readonly': True}, + 'resource_type': {'readonly': True}, + 'kind': {'readonly': True}, + 'locations': {'readonly': True}, + 'capabilities': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'tier': {'key': 'tier', 'type': 'str'}, + 'resource_type': {'key': 'resourceType', 'type': 'str'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'locations': {'key': 'locations', 'type': '[str]'}, + 'capabilities': {'key': 'capabilities', 'type': '[SKUCapability]'}, + 'restrictions': {'key': 'restrictions', 'type': '[Restriction]'}, + } + + def __init__( + self, + *, + name: Union[str, "SkuName"], + restrictions: Optional[List["Restriction"]] = None, + **kwargs + ): + super(SkuInformation, self).__init__(**kwargs) + self.name = name + self.tier = None + self.resource_type = None + self.kind = None + self.locations = None + self.capabilities = None + self.restrictions = restrictions + + +class SmbSetting(msrest.serialization.Model): + """Setting for SMB protocol. + + :param multichannel: Multichannel setting. Applies to Premium FileStorage only. + :type multichannel: ~azure.mgmt.storage.v2021_06_01.models.Multichannel + :param versions: SMB protocol versions supported by server. Valid values are SMB2.1, SMB3.0, + SMB3.1.1. Should be passed as a string with delimiter ';'. + :type versions: str + :param authentication_methods: SMB authentication methods supported by server. Valid values are + NTLMv2, Kerberos. Should be passed as a string with delimiter ';'. + :type authentication_methods: str + :param kerberos_ticket_encryption: Kerberos ticket encryption supported by server. Valid values + are RC4-HMAC, AES-256. Should be passed as a string with delimiter ';'. + :type kerberos_ticket_encryption: str + :param channel_encryption: SMB channel encryption supported by server. Valid values are + AES-128-CCM, AES-128-GCM, AES-256-GCM. Should be passed as a string with delimiter ';'. 
+ :type channel_encryption: str + """ + + _attribute_map = { + 'multichannel': {'key': 'multichannel', 'type': 'Multichannel'}, + 'versions': {'key': 'versions', 'type': 'str'}, + 'authentication_methods': {'key': 'authenticationMethods', 'type': 'str'}, + 'kerberos_ticket_encryption': {'key': 'kerberosTicketEncryption', 'type': 'str'}, + 'channel_encryption': {'key': 'channelEncryption', 'type': 'str'}, + } + + def __init__( + self, + *, + multichannel: Optional["Multichannel"] = None, + versions: Optional[str] = None, + authentication_methods: Optional[str] = None, + kerberos_ticket_encryption: Optional[str] = None, + channel_encryption: Optional[str] = None, + **kwargs + ): + super(SmbSetting, self).__init__(**kwargs) + self.multichannel = multichannel + self.versions = versions + self.authentication_methods = authentication_methods + self.kerberos_ticket_encryption = kerberos_ticket_encryption + self.channel_encryption = channel_encryption + + +class TrackedResource(Resource): + """The resource model definition for an Azure Resource Manager tracked top level resource which has 'tags' and a 'location'. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives. + :type location: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + } + + def __init__( + self, + *, + location: str, + tags: Optional[Dict[str, str]] = None, + **kwargs + ): + super(TrackedResource, self).__init__(**kwargs) + self.tags = tags + self.location = location + + +class StorageAccount(TrackedResource): + """The storage account. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives. + :type location: str + :ivar sku: Gets the SKU. + :vartype sku: ~azure.mgmt.storage.v2021_06_01.models.Sku + :ivar kind: Gets the Kind. Possible values include: "Storage", "StorageV2", "BlobStorage", + "FileStorage", "BlockBlobStorage". 
+ :vartype kind: str or ~azure.mgmt.storage.v2021_06_01.models.Kind + :param identity: The identity of the resource. + :type identity: ~azure.mgmt.storage.v2021_06_01.models.Identity + :param extended_location: The extendedLocation of the resource. + :type extended_location: ~azure.mgmt.storage.v2021_06_01.models.ExtendedLocation + :ivar provisioning_state: Gets the status of the storage account at the time the operation was + called. Possible values include: "Creating", "ResolvingDNS", "Succeeded". + :vartype provisioning_state: str or ~azure.mgmt.storage.v2021_06_01.models.ProvisioningState + :ivar primary_endpoints: Gets the URLs that are used to perform a retrieval of a public blob, + queue, or table object. Note that Standard_ZRS and Premium_LRS accounts only return the blob + endpoint. + :vartype primary_endpoints: ~azure.mgmt.storage.v2021_06_01.models.Endpoints + :ivar primary_location: Gets the location of the primary data center for the storage account. + :vartype primary_location: str + :ivar status_of_primary: Gets the status indicating whether the primary location of the storage + account is available or unavailable. Possible values include: "available", "unavailable". + :vartype status_of_primary: str or ~azure.mgmt.storage.v2021_06_01.models.AccountStatus + :ivar last_geo_failover_time: Gets the timestamp of the most recent instance of a failover to + the secondary location. Only the most recent timestamp is retained. This element is not + returned if there has never been a failover instance. Only available if the accountType is + Standard_GRS or Standard_RAGRS. + :vartype last_geo_failover_time: ~datetime.datetime + :ivar secondary_location: Gets the location of the geo-replicated secondary for the storage + account. Only available if the accountType is Standard_GRS or Standard_RAGRS. + :vartype secondary_location: str + :ivar status_of_secondary: Gets the status indicating whether the secondary location of the + storage account is available or unavailable. Only available if the SKU name is Standard_GRS or + Standard_RAGRS. Possible values include: "available", "unavailable". + :vartype status_of_secondary: str or ~azure.mgmt.storage.v2021_06_01.models.AccountStatus + :ivar creation_time: Gets the creation date and time of the storage account in UTC. + :vartype creation_time: ~datetime.datetime + :ivar custom_domain: Gets the custom domain the user assigned to this storage account. + :vartype custom_domain: ~azure.mgmt.storage.v2021_06_01.models.CustomDomain + :ivar sas_policy: SasPolicy assigned to the storage account. + :vartype sas_policy: ~azure.mgmt.storage.v2021_06_01.models.SasPolicy + :ivar key_policy: KeyPolicy assigned to the storage account. + :vartype key_policy: ~azure.mgmt.storage.v2021_06_01.models.KeyPolicy + :ivar key_creation_time: Storage account keys creation time. + :vartype key_creation_time: ~azure.mgmt.storage.v2021_06_01.models.KeyCreationTime + :ivar secondary_endpoints: Gets the URLs that are used to perform a retrieval of a public blob, + queue, or table object from the secondary location of the storage account. Only available if + the SKU name is Standard_RAGRS. + :vartype secondary_endpoints: ~azure.mgmt.storage.v2021_06_01.models.Endpoints + :ivar encryption: Gets the encryption settings on the account. If unspecified, the account is + unencrypted. + :vartype encryption: ~azure.mgmt.storage.v2021_06_01.models.Encryption + :ivar access_tier: Required for storage accounts where kind = BlobStorage. The access tier used + for billing. 
Possible values include: "Hot", "Cool". + :vartype access_tier: str or ~azure.mgmt.storage.v2021_06_01.models.AccessTier + :param azure_files_identity_based_authentication: Provides the identity based authentication + settings for Azure Files. + :type azure_files_identity_based_authentication: + ~azure.mgmt.storage.v2021_06_01.models.AzureFilesIdentityBasedAuthentication + :param enable_https_traffic_only: Allows https traffic only to storage service if sets to true. + :type enable_https_traffic_only: bool + :ivar network_rule_set: Network rule set. + :vartype network_rule_set: ~azure.mgmt.storage.v2021_06_01.models.NetworkRuleSet + :param is_hns_enabled: Account HierarchicalNamespace enabled if sets to true. + :type is_hns_enabled: bool + :ivar geo_replication_stats: Geo Replication Stats. + :vartype geo_replication_stats: ~azure.mgmt.storage.v2021_06_01.models.GeoReplicationStats + :ivar failover_in_progress: If the failover is in progress, the value will be true, otherwise, + it will be null. + :vartype failover_in_progress: bool + :param large_file_shares_state: Allow large file shares if sets to Enabled. It cannot be + disabled once it is enabled. Possible values include: "Disabled", "Enabled". + :type large_file_shares_state: str or + ~azure.mgmt.storage.v2021_06_01.models.LargeFileSharesState + :ivar private_endpoint_connections: List of private endpoint connection associated with the + specified storage account. + :vartype private_endpoint_connections: + list[~azure.mgmt.storage.v2021_06_01.models.PrivateEndpointConnection] + :param routing_preference: Maintains information about the network routing choice opted by the + user for data transfer. + :type routing_preference: ~azure.mgmt.storage.v2021_06_01.models.RoutingPreference + :ivar blob_restore_status: Blob restore status. + :vartype blob_restore_status: ~azure.mgmt.storage.v2021_06_01.models.BlobRestoreStatus + :param allow_blob_public_access: Allow or disallow public access to all blobs or containers in + the storage account. The default interpretation is true for this property. + :type allow_blob_public_access: bool + :param minimum_tls_version: Set the minimum TLS version to be permitted on requests to storage. + The default interpretation is TLS 1.0 for this property. Possible values include: "TLS1_0", + "TLS1_1", "TLS1_2". + :type minimum_tls_version: str or ~azure.mgmt.storage.v2021_06_01.models.MinimumTlsVersion + :param allow_shared_key_access: Indicates whether the storage account permits requests to be + authorized with the account access key via Shared Key. If false, then all requests, including + shared access signatures, must be authorized with Azure Active Directory (Azure AD). The + default value is null, which is equivalent to true. + :type allow_shared_key_access: bool + :param enable_nfs_v3: NFS 3.0 protocol support enabled if set to true. + :type enable_nfs_v3: bool + :param allow_cross_tenant_replication: Allow or disallow cross AAD tenant object replication. + The default interpretation is true for this property. + :type allow_cross_tenant_replication: bool + :param default_to_o_auth_authentication: A boolean flag which indicates whether the default + authentication is OAuth or not. The default interpretation is false for this property. + :type default_to_o_auth_authentication: bool + :param public_network_access: Allow or disallow public network access to Storage Account. Value + is optional but if passed in, must be 'Enabled' or 'Disabled'. Possible values include: + "Enabled", "Disabled". 
+ :type public_network_access: str or ~azure.mgmt.storage.v2021_06_01.models.PublicNetworkAccess + :param immutable_storage_with_versioning: The property is immutable and can only be set to true + at the account creation time. When set to true, it enables object level immutability for all + the containers in the account by default. + :type immutable_storage_with_versioning: + ~azure.mgmt.storage.v2021_06_01.models.ImmutableStorageAccount + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + 'sku': {'readonly': True}, + 'kind': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'primary_endpoints': {'readonly': True}, + 'primary_location': {'readonly': True}, + 'status_of_primary': {'readonly': True}, + 'last_geo_failover_time': {'readonly': True}, + 'secondary_location': {'readonly': True}, + 'status_of_secondary': {'readonly': True}, + 'creation_time': {'readonly': True}, + 'custom_domain': {'readonly': True}, + 'sas_policy': {'readonly': True}, + 'key_policy': {'readonly': True}, + 'key_creation_time': {'readonly': True}, + 'secondary_endpoints': {'readonly': True}, + 'encryption': {'readonly': True}, + 'access_tier': {'readonly': True}, + 'network_rule_set': {'readonly': True}, + 'geo_replication_stats': {'readonly': True}, + 'failover_in_progress': {'readonly': True}, + 'private_endpoint_connections': {'readonly': True}, + 'blob_restore_status': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'Identity'}, + 'extended_location': {'key': 'extendedLocation', 'type': 'ExtendedLocation'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'primary_endpoints': {'key': 'properties.primaryEndpoints', 'type': 'Endpoints'}, + 'primary_location': {'key': 'properties.primaryLocation', 'type': 'str'}, + 'status_of_primary': {'key': 'properties.statusOfPrimary', 'type': 'str'}, + 'last_geo_failover_time': {'key': 'properties.lastGeoFailoverTime', 'type': 'iso-8601'}, + 'secondary_location': {'key': 'properties.secondaryLocation', 'type': 'str'}, + 'status_of_secondary': {'key': 'properties.statusOfSecondary', 'type': 'str'}, + 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'}, + 'custom_domain': {'key': 'properties.customDomain', 'type': 'CustomDomain'}, + 'sas_policy': {'key': 'properties.sasPolicy', 'type': 'SasPolicy'}, + 'key_policy': {'key': 'properties.keyPolicy', 'type': 'KeyPolicy'}, + 'key_creation_time': {'key': 'properties.keyCreationTime', 'type': 'KeyCreationTime'}, + 'secondary_endpoints': {'key': 'properties.secondaryEndpoints', 'type': 'Endpoints'}, + 'encryption': {'key': 'properties.encryption', 'type': 'Encryption'}, + 'access_tier': {'key': 'properties.accessTier', 'type': 'str'}, + 'azure_files_identity_based_authentication': {'key': 'properties.azureFilesIdentityBasedAuthentication', 'type': 'AzureFilesIdentityBasedAuthentication'}, + 'enable_https_traffic_only': {'key': 'properties.supportsHttpsTrafficOnly', 'type': 'bool'}, + 'network_rule_set': {'key': 'properties.networkAcls', 'type': 'NetworkRuleSet'}, + 'is_hns_enabled': {'key': 'properties.isHnsEnabled', 'type': 'bool'}, + 
'geo_replication_stats': {'key': 'properties.geoReplicationStats', 'type': 'GeoReplicationStats'}, + 'failover_in_progress': {'key': 'properties.failoverInProgress', 'type': 'bool'}, + 'large_file_shares_state': {'key': 'properties.largeFileSharesState', 'type': 'str'}, + 'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'}, + 'routing_preference': {'key': 'properties.routingPreference', 'type': 'RoutingPreference'}, + 'blob_restore_status': {'key': 'properties.blobRestoreStatus', 'type': 'BlobRestoreStatus'}, + 'allow_blob_public_access': {'key': 'properties.allowBlobPublicAccess', 'type': 'bool'}, + 'minimum_tls_version': {'key': 'properties.minimumTlsVersion', 'type': 'str'}, + 'allow_shared_key_access': {'key': 'properties.allowSharedKeyAccess', 'type': 'bool'}, + 'enable_nfs_v3': {'key': 'properties.isNfsV3Enabled', 'type': 'bool'}, + 'allow_cross_tenant_replication': {'key': 'properties.allowCrossTenantReplication', 'type': 'bool'}, + 'default_to_o_auth_authentication': {'key': 'properties.defaultToOAuthAuthentication', 'type': 'bool'}, + 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, + 'immutable_storage_with_versioning': {'key': 'properties.immutableStorageWithVersioning', 'type': 'ImmutableStorageAccount'}, + } + + def __init__( + self, + *, + location: str, + tags: Optional[Dict[str, str]] = None, + identity: Optional["Identity"] = None, + extended_location: Optional["ExtendedLocation"] = None, + azure_files_identity_based_authentication: Optional["AzureFilesIdentityBasedAuthentication"] = None, + enable_https_traffic_only: Optional[bool] = None, + is_hns_enabled: Optional[bool] = None, + large_file_shares_state: Optional[Union[str, "LargeFileSharesState"]] = None, + routing_preference: Optional["RoutingPreference"] = None, + allow_blob_public_access: Optional[bool] = None, + minimum_tls_version: Optional[Union[str, "MinimumTlsVersion"]] = None, + allow_shared_key_access: Optional[bool] = None, + enable_nfs_v3: Optional[bool] = None, + allow_cross_tenant_replication: Optional[bool] = None, + default_to_o_auth_authentication: Optional[bool] = None, + public_network_access: Optional[Union[str, "PublicNetworkAccess"]] = None, + immutable_storage_with_versioning: Optional["ImmutableStorageAccount"] = None, + **kwargs + ): + super(StorageAccount, self).__init__(tags=tags, location=location, **kwargs) + self.sku = None + self.kind = None + self.identity = identity + self.extended_location = extended_location + self.provisioning_state = None + self.primary_endpoints = None + self.primary_location = None + self.status_of_primary = None + self.last_geo_failover_time = None + self.secondary_location = None + self.status_of_secondary = None + self.creation_time = None + self.custom_domain = None + self.sas_policy = None + self.key_policy = None + self.key_creation_time = None + self.secondary_endpoints = None + self.encryption = None + self.access_tier = None + self.azure_files_identity_based_authentication = azure_files_identity_based_authentication + self.enable_https_traffic_only = enable_https_traffic_only + self.network_rule_set = None + self.is_hns_enabled = is_hns_enabled + self.geo_replication_stats = None + self.failover_in_progress = None + self.large_file_shares_state = large_file_shares_state + self.private_endpoint_connections = None + self.routing_preference = routing_preference + self.blob_restore_status = None + self.allow_blob_public_access = allow_blob_public_access + 
self.minimum_tls_version = minimum_tls_version
+ self.allow_shared_key_access = allow_shared_key_access
+ self.enable_nfs_v3 = enable_nfs_v3
+ self.allow_cross_tenant_replication = allow_cross_tenant_replication
+ self.default_to_o_auth_authentication = default_to_o_auth_authentication
+ self.public_network_access = public_network_access
+ self.immutable_storage_with_versioning = immutable_storage_with_versioning
+
+
+class StorageAccountCheckNameAvailabilityParameters(msrest.serialization.Model):
+ """The parameters used to check the availability of the storage account name.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param name: Required. The storage account name.
+ :type name: str
+ :ivar type: The type of resource, Microsoft.Storage/storageAccounts. Has constant value:
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True, 'constant': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ type = "Microsoft.Storage/storageAccounts"
+
+ def __init__(
+ self,
+ *,
+ name: str,
+ **kwargs
+ ):
+ super(StorageAccountCheckNameAvailabilityParameters, self).__init__(**kwargs)
+ self.name = name
+
+
+class StorageAccountCreateParameters(msrest.serialization.Model):
+ """The parameters used when creating a storage account.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param sku: Required. Gets or sets the SKU name.
+ :type sku: ~azure.mgmt.storage.v2021_06_01.models.Sku
+ :param kind: Required. Indicates the type of storage account. Possible values
+ include: "Storage", "StorageV2", "BlobStorage", "FileStorage", "BlockBlobStorage".
+ :type kind: str or ~azure.mgmt.storage.v2021_06_01.models.Kind
+ :param location: Required. Gets or sets the location of the resource. This will be
+ one of the supported and registered Azure Geo Regions (e.g. West US, East US, Southeast Asia,
+ etc.). The geo region of a resource cannot be changed once it is created, but if an identical
+ geo region is specified on update, the request will succeed.
+ :type location: str
+ :param extended_location: Optional. Set the extended location of the resource. If not set, the
+ storage account will be created in Azure main region. Otherwise it will be created in the
+ specified extended location.
+ :type extended_location: ~azure.mgmt.storage.v2021_06_01.models.ExtendedLocation
+ :param tags: A set of tags. Gets or sets a list of key value pairs that describe the resource.
+ These tags can be used for viewing and grouping this resource (across resource groups). A
+ maximum of 15 tags can be provided for a resource. Each tag must have a key with a length no
+ greater than 128 characters and a value with a length no greater than 256 characters.
+ :type tags: dict[str, str]
+ :param identity: The identity of the resource.
+ :type identity: ~azure.mgmt.storage.v2021_06_01.models.Identity
+ :param public_network_access: Allow or disallow public network access to Storage Account. Value
+ is optional but if passed in, must be 'Enabled' or 'Disabled'. Possible values include:
+ "Enabled", "Disabled".
+ :type public_network_access: str or ~azure.mgmt.storage.v2021_06_01.models.PublicNetworkAccess
+ :param sas_policy: SasPolicy assigned to the storage account.
+ :type sas_policy: ~azure.mgmt.storage.v2021_06_01.models.SasPolicy + :param key_policy: KeyPolicy assigned to the storage account. + :type key_policy: ~azure.mgmt.storage.v2021_06_01.models.KeyPolicy + :param custom_domain: User domain assigned to the storage account. Name is the CNAME source. + Only one custom domain is supported per storage account at this time. To clear the existing + custom domain, use an empty string for the custom domain name property. + :type custom_domain: ~azure.mgmt.storage.v2021_06_01.models.CustomDomain + :param encryption: Not applicable. Azure Storage encryption is enabled for all storage accounts + and cannot be disabled. + :type encryption: ~azure.mgmt.storage.v2021_06_01.models.Encryption + :param network_rule_set: Network rule set. + :type network_rule_set: ~azure.mgmt.storage.v2021_06_01.models.NetworkRuleSet + :param access_tier: Required for storage accounts where kind = BlobStorage. The access tier + used for billing. Possible values include: "Hot", "Cool". + :type access_tier: str or ~azure.mgmt.storage.v2021_06_01.models.AccessTier + :param azure_files_identity_based_authentication: Provides the identity based authentication + settings for Azure Files. + :type azure_files_identity_based_authentication: + ~azure.mgmt.storage.v2021_06_01.models.AzureFilesIdentityBasedAuthentication + :param enable_https_traffic_only: Allows https traffic only to storage service if sets to true. + The default value is true since API version 2019-04-01. + :type enable_https_traffic_only: bool + :param is_hns_enabled: Account HierarchicalNamespace enabled if sets to true. + :type is_hns_enabled: bool + :param large_file_shares_state: Allow large file shares if sets to Enabled. It cannot be + disabled once it is enabled. Possible values include: "Disabled", "Enabled". + :type large_file_shares_state: str or + ~azure.mgmt.storage.v2021_06_01.models.LargeFileSharesState + :param routing_preference: Maintains information about the network routing choice opted by the + user for data transfer. + :type routing_preference: ~azure.mgmt.storage.v2021_06_01.models.RoutingPreference + :param allow_blob_public_access: Allow or disallow public access to all blobs or containers in + the storage account. The default interpretation is true for this property. + :type allow_blob_public_access: bool + :param minimum_tls_version: Set the minimum TLS version to be permitted on requests to storage. + The default interpretation is TLS 1.0 for this property. Possible values include: "TLS1_0", + "TLS1_1", "TLS1_2". + :type minimum_tls_version: str or ~azure.mgmt.storage.v2021_06_01.models.MinimumTlsVersion + :param allow_shared_key_access: Indicates whether the storage account permits requests to be + authorized with the account access key via Shared Key. If false, then all requests, including + shared access signatures, must be authorized with Azure Active Directory (Azure AD). The + default value is null, which is equivalent to true. + :type allow_shared_key_access: bool + :param enable_nfs_v3: NFS 3.0 protocol support enabled if set to true. + :type enable_nfs_v3: bool + :param allow_cross_tenant_replication: Allow or disallow cross AAD tenant object replication. + The default interpretation is true for this property. + :type allow_cross_tenant_replication: bool + :param default_to_o_auth_authentication: A boolean flag which indicates whether the default + authentication is OAuth or not. The default interpretation is false for this property. 
+ :type default_to_o_auth_authentication: bool + :param immutable_storage_with_versioning: The property is immutable and can only be set to true + at the account creation time. When set to true, it enables object level immutability for all + the new containers in the account by default. + :type immutable_storage_with_versioning: + ~azure.mgmt.storage.v2021_06_01.models.ImmutableStorageAccount + """ + + _validation = { + 'sku': {'required': True}, + 'kind': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'extended_location': {'key': 'extendedLocation', 'type': 'ExtendedLocation'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'identity': {'key': 'identity', 'type': 'Identity'}, + 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, + 'sas_policy': {'key': 'properties.sasPolicy', 'type': 'SasPolicy'}, + 'key_policy': {'key': 'properties.keyPolicy', 'type': 'KeyPolicy'}, + 'custom_domain': {'key': 'properties.customDomain', 'type': 'CustomDomain'}, + 'encryption': {'key': 'properties.encryption', 'type': 'Encryption'}, + 'network_rule_set': {'key': 'properties.networkAcls', 'type': 'NetworkRuleSet'}, + 'access_tier': {'key': 'properties.accessTier', 'type': 'str'}, + 'azure_files_identity_based_authentication': {'key': 'properties.azureFilesIdentityBasedAuthentication', 'type': 'AzureFilesIdentityBasedAuthentication'}, + 'enable_https_traffic_only': {'key': 'properties.supportsHttpsTrafficOnly', 'type': 'bool'}, + 'is_hns_enabled': {'key': 'properties.isHnsEnabled', 'type': 'bool'}, + 'large_file_shares_state': {'key': 'properties.largeFileSharesState', 'type': 'str'}, + 'routing_preference': {'key': 'properties.routingPreference', 'type': 'RoutingPreference'}, + 'allow_blob_public_access': {'key': 'properties.allowBlobPublicAccess', 'type': 'bool'}, + 'minimum_tls_version': {'key': 'properties.minimumTlsVersion', 'type': 'str'}, + 'allow_shared_key_access': {'key': 'properties.allowSharedKeyAccess', 'type': 'bool'}, + 'enable_nfs_v3': {'key': 'properties.isNfsV3Enabled', 'type': 'bool'}, + 'allow_cross_tenant_replication': {'key': 'properties.allowCrossTenantReplication', 'type': 'bool'}, + 'default_to_o_auth_authentication': {'key': 'properties.defaultToOAuthAuthentication', 'type': 'bool'}, + 'immutable_storage_with_versioning': {'key': 'properties.immutableStorageWithVersioning', 'type': 'ImmutableStorageAccount'}, + } + + def __init__( + self, + *, + sku: "Sku", + kind: Union[str, "Kind"], + location: str, + extended_location: Optional["ExtendedLocation"] = None, + tags: Optional[Dict[str, str]] = None, + identity: Optional["Identity"] = None, + public_network_access: Optional[Union[str, "PublicNetworkAccess"]] = None, + sas_policy: Optional["SasPolicy"] = None, + key_policy: Optional["KeyPolicy"] = None, + custom_domain: Optional["CustomDomain"] = None, + encryption: Optional["Encryption"] = None, + network_rule_set: Optional["NetworkRuleSet"] = None, + access_tier: Optional[Union[str, "AccessTier"]] = None, + azure_files_identity_based_authentication: Optional["AzureFilesIdentityBasedAuthentication"] = None, + enable_https_traffic_only: Optional[bool] = None, + is_hns_enabled: Optional[bool] = None, + large_file_shares_state: Optional[Union[str, "LargeFileSharesState"]] = None, + routing_preference: Optional["RoutingPreference"] = None, + allow_blob_public_access: Optional[bool] = None, + 
minimum_tls_version: Optional[Union[str, "MinimumTlsVersion"]] = None, + allow_shared_key_access: Optional[bool] = None, + enable_nfs_v3: Optional[bool] = None, + allow_cross_tenant_replication: Optional[bool] = None, + default_to_o_auth_authentication: Optional[bool] = None, + immutable_storage_with_versioning: Optional["ImmutableStorageAccount"] = None, + **kwargs + ): + super(StorageAccountCreateParameters, self).__init__(**kwargs) + self.sku = sku + self.kind = kind + self.location = location + self.extended_location = extended_location + self.tags = tags + self.identity = identity + self.public_network_access = public_network_access + self.sas_policy = sas_policy + self.key_policy = key_policy + self.custom_domain = custom_domain + self.encryption = encryption + self.network_rule_set = network_rule_set + self.access_tier = access_tier + self.azure_files_identity_based_authentication = azure_files_identity_based_authentication + self.enable_https_traffic_only = enable_https_traffic_only + self.is_hns_enabled = is_hns_enabled + self.large_file_shares_state = large_file_shares_state + self.routing_preference = routing_preference + self.allow_blob_public_access = allow_blob_public_access + self.minimum_tls_version = minimum_tls_version + self.allow_shared_key_access = allow_shared_key_access + self.enable_nfs_v3 = enable_nfs_v3 + self.allow_cross_tenant_replication = allow_cross_tenant_replication + self.default_to_o_auth_authentication = default_to_o_auth_authentication + self.immutable_storage_with_versioning = immutable_storage_with_versioning + + +class StorageAccountInternetEndpoints(msrest.serialization.Model): + """The URIs that are used to perform a retrieval of a public blob, file, web or dfs object via a internet routing endpoint. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar blob: Gets the blob endpoint. + :vartype blob: str + :ivar file: Gets the file endpoint. + :vartype file: str + :ivar web: Gets the web endpoint. + :vartype web: str + :ivar dfs: Gets the dfs endpoint. + :vartype dfs: str + """ + + _validation = { + 'blob': {'readonly': True}, + 'file': {'readonly': True}, + 'web': {'readonly': True}, + 'dfs': {'readonly': True}, + } + + _attribute_map = { + 'blob': {'key': 'blob', 'type': 'str'}, + 'file': {'key': 'file', 'type': 'str'}, + 'web': {'key': 'web', 'type': 'str'}, + 'dfs': {'key': 'dfs', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(StorageAccountInternetEndpoints, self).__init__(**kwargs) + self.blob = None + self.file = None + self.web = None + self.dfs = None + + +class StorageAccountKey(msrest.serialization.Model): + """An access key for the storage account. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar key_name: Name of the key. + :vartype key_name: str + :ivar value: Base 64-encoded value of the key. + :vartype value: str + :ivar permissions: Permissions for the key -- read-only or full permissions. Possible values + include: "Read", "Full". + :vartype permissions: str or ~azure.mgmt.storage.v2021_06_01.models.KeyPermission + :ivar creation_time: Creation time of the key, in round trip date format. 
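
With the `__init__` above in place, `StorageAccountCreateParameters` is typically passed to the storage accounts `begin_create` long-running operation. A hedged sketch, with placeholder resource group, account name, and region; `Sku` and `Kind` are assumed to come from the same models module:

```py
from azure.identity import DefaultAzureCredential
from azure.mgmt.storage import StorageManagementClient
from azure.mgmt.storage.v2021_06_01.models import Kind, Sku, StorageAccountCreateParameters

client = StorageManagementClient(DefaultAzureCredential(), "<subscription-id>")

params = StorageAccountCreateParameters(
    sku=Sku(name="Standard_LRS"),
    kind=Kind.STORAGE_V2,          # plain strings such as "StorageV2" are accepted as well
    location="eastus",
    minimum_tls_version="TLS1_2",  # optional keyword arguments map onto properties.* keys
    allow_blob_public_access=False,
)

# begin_create returns an LROPoller; result() blocks until provisioning finishes.
poller = client.storage_accounts.begin_create("<resource-group>", "<account-name>", params)
account = poller.result()
print(account.id, account.provisioning_state)
```
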
+ :vartype creation_time: ~datetime.datetime + """ + + _validation = { + 'key_name': {'readonly': True}, + 'value': {'readonly': True}, + 'permissions': {'readonly': True}, + 'creation_time': {'readonly': True}, + } + + _attribute_map = { + 'key_name': {'key': 'keyName', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + 'permissions': {'key': 'permissions', 'type': 'str'}, + 'creation_time': {'key': 'creationTime', 'type': 'iso-8601'}, + } + + def __init__( + self, + **kwargs + ): + super(StorageAccountKey, self).__init__(**kwargs) + self.key_name = None + self.value = None + self.permissions = None + self.creation_time = None + + +class StorageAccountListKeysResult(msrest.serialization.Model): + """The response from the ListKeys operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar keys: Gets the list of storage account keys and their properties for the specified + storage account. + :vartype keys: list[~azure.mgmt.storage.v2021_06_01.models.StorageAccountKey] + """ + + _validation = { + 'keys': {'readonly': True}, + } + + _attribute_map = { + 'keys': {'key': 'keys', 'type': '[StorageAccountKey]'}, + } + + def __init__( + self, + **kwargs + ): + super(StorageAccountListKeysResult, self).__init__(**kwargs) + self.keys = None + + +class StorageAccountListResult(msrest.serialization.Model): + """The response from the List Storage Accounts operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: Gets the list of storage accounts and their properties. + :vartype value: list[~azure.mgmt.storage.v2021_06_01.models.StorageAccount] + :ivar next_link: Request URL that can be used to query next page of storage accounts. Returned + when total number of requested storage accounts exceed maximum page size. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[StorageAccount]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(StorageAccountListResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class StorageAccountMicrosoftEndpoints(msrest.serialization.Model): + """The URIs that are used to perform a retrieval of a public blob, queue, table, web or dfs object via a microsoft routing endpoint. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar blob: Gets the blob endpoint. + :vartype blob: str + :ivar queue: Gets the queue endpoint. + :vartype queue: str + :ivar table: Gets the table endpoint. + :vartype table: str + :ivar file: Gets the file endpoint. + :vartype file: str + :ivar web: Gets the web endpoint. + :vartype web: str + :ivar dfs: Gets the dfs endpoint. 
+ :vartype dfs: str + """ + + _validation = { + 'blob': {'readonly': True}, + 'queue': {'readonly': True}, + 'table': {'readonly': True}, + 'file': {'readonly': True}, + 'web': {'readonly': True}, + 'dfs': {'readonly': True}, + } + + _attribute_map = { + 'blob': {'key': 'blob', 'type': 'str'}, + 'queue': {'key': 'queue', 'type': 'str'}, + 'table': {'key': 'table', 'type': 'str'}, + 'file': {'key': 'file', 'type': 'str'}, + 'web': {'key': 'web', 'type': 'str'}, + 'dfs': {'key': 'dfs', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(StorageAccountMicrosoftEndpoints, self).__init__(**kwargs) + self.blob = None + self.queue = None + self.table = None + self.file = None + self.web = None + self.dfs = None + + +class StorageAccountRegenerateKeyParameters(msrest.serialization.Model): + """The parameters used to regenerate the storage account key. + + All required parameters must be populated in order to send to Azure. + + :param key_name: Required. The name of storage keys that want to be regenerated, possible + values are key1, key2, kerb1, kerb2. + :type key_name: str + """ + + _validation = { + 'key_name': {'required': True}, + } + + _attribute_map = { + 'key_name': {'key': 'keyName', 'type': 'str'}, + } + + def __init__( + self, + *, + key_name: str, + **kwargs + ): + super(StorageAccountRegenerateKeyParameters, self).__init__(**kwargs) + self.key_name = key_name + + +class StorageAccountUpdateParameters(msrest.serialization.Model): + """The parameters that can be provided when updating the storage account properties. + + :param sku: Gets or sets the SKU name. Note that the SKU name cannot be updated to + Standard_ZRS, Premium_LRS or Premium_ZRS, nor can accounts of those SKU names be updated to any + other value. + :type sku: ~azure.mgmt.storage.v2021_06_01.models.Sku + :param tags: A set of tags. Gets or sets a list of key value pairs that describe the resource. + These tags can be used in viewing and grouping this resource (across resource groups). A + maximum of 15 tags can be provided for a resource. Each tag must have a key no greater in + length than 128 characters and a value no greater in length than 256 characters. + :type tags: dict[str, str] + :param identity: The identity of the resource. + :type identity: ~azure.mgmt.storage.v2021_06_01.models.Identity + :param kind: Optional. Indicates the type of storage account. Currently only StorageV2 value + supported by server. Possible values include: "Storage", "StorageV2", "BlobStorage", + "FileStorage", "BlockBlobStorage". + :type kind: str or ~azure.mgmt.storage.v2021_06_01.models.Kind + :param custom_domain: Custom domain assigned to the storage account by the user. Name is the + CNAME source. Only one custom domain is supported per storage account at this time. To clear + the existing custom domain, use an empty string for the custom domain name property. + :type custom_domain: ~azure.mgmt.storage.v2021_06_01.models.CustomDomain + :param encryption: Provides the encryption settings on the account. The default setting is + unencrypted. + :type encryption: ~azure.mgmt.storage.v2021_06_01.models.Encryption + :param sas_policy: SasPolicy assigned to the storage account. + :type sas_policy: ~azure.mgmt.storage.v2021_06_01.models.SasPolicy + :param key_policy: KeyPolicy assigned to the storage account. + :type key_policy: ~azure.mgmt.storage.v2021_06_01.models.KeyPolicy + :param access_tier: Required for storage accounts where kind = BlobStorage. The access tier + used for billing. 
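
The read-only models above (`StorageAccountKey`, `StorageAccountListKeysResult`, `StorageAccountListResult`) are response types, while `StorageAccountRegenerateKeyParameters` is the input to key regeneration; callers never page `next_link` by hand because the list operations return an `ItemPaged`. A sketch under the same assumptions as the earlier ones (placeholder names, aggregate client, `azure-identity`):

```py
from azure.identity import DefaultAzureCredential
from azure.mgmt.storage import StorageManagementClient
from azure.mgmt.storage.v2021_06_01.models import StorageAccountRegenerateKeyParameters

client = StorageManagementClient(DefaultAzureCredential(), "<subscription-id>")

# Paging over StorageAccountListResult.next_link is handled by the returned ItemPaged.
for account in client.storage_accounts.list_by_resource_group("<resource-group>"):
    print(account.name, account.location)

# list_keys returns StorageAccountListKeysResult; .keys holds read-only StorageAccountKey items.
keys = client.storage_accounts.list_keys("<resource-group>", "<account-name>")
for key in keys.keys:
    print(key.key_name, key.permissions, key.creation_time)

# Rotate key1; the response is the refreshed key list.
regenerated = client.storage_accounts.regenerate_key(
    "<resource-group>",
    "<account-name>",
    StorageAccountRegenerateKeyParameters(key_name="key1"),
)
print([k.key_name for k in regenerated.keys])
```
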
Possible values include: "Hot", "Cool". + :type access_tier: str or ~azure.mgmt.storage.v2021_06_01.models.AccessTier + :param azure_files_identity_based_authentication: Provides the identity based authentication + settings for Azure Files. + :type azure_files_identity_based_authentication: + ~azure.mgmt.storage.v2021_06_01.models.AzureFilesIdentityBasedAuthentication + :param enable_https_traffic_only: Allows https traffic only to storage service if sets to true. + :type enable_https_traffic_only: bool + :param network_rule_set: Network rule set. + :type network_rule_set: ~azure.mgmt.storage.v2021_06_01.models.NetworkRuleSet + :param large_file_shares_state: Allow large file shares if sets to Enabled. It cannot be + disabled once it is enabled. Possible values include: "Disabled", "Enabled". + :type large_file_shares_state: str or + ~azure.mgmt.storage.v2021_06_01.models.LargeFileSharesState + :param routing_preference: Maintains information about the network routing choice opted by the + user for data transfer. + :type routing_preference: ~azure.mgmt.storage.v2021_06_01.models.RoutingPreference + :param allow_blob_public_access: Allow or disallow public access to all blobs or containers in + the storage account. The default interpretation is true for this property. + :type allow_blob_public_access: bool + :param minimum_tls_version: Set the minimum TLS version to be permitted on requests to storage. + The default interpretation is TLS 1.0 for this property. Possible values include: "TLS1_0", + "TLS1_1", "TLS1_2". + :type minimum_tls_version: str or ~azure.mgmt.storage.v2021_06_01.models.MinimumTlsVersion + :param allow_shared_key_access: Indicates whether the storage account permits requests to be + authorized with the account access key via Shared Key. If false, then all requests, including + shared access signatures, must be authorized with Azure Active Directory (Azure AD). The + default value is null, which is equivalent to true. + :type allow_shared_key_access: bool + :param allow_cross_tenant_replication: Allow or disallow cross AAD tenant object replication. + The default interpretation is true for this property. + :type allow_cross_tenant_replication: bool + :param default_to_o_auth_authentication: A boolean flag which indicates whether the default + authentication is OAuth or not. The default interpretation is false for this property. + :type default_to_o_auth_authentication: bool + :param public_network_access: Allow or disallow public network access to Storage Account. Value + is optional but if passed in, must be 'Enabled' or 'Disabled'. Possible values include: + "Enabled", "Disabled". + :type public_network_access: str or ~azure.mgmt.storage.v2021_06_01.models.PublicNetworkAccess + :param immutable_storage_with_versioning: The property is immutable and can only be set to true + at the account creation time. When set to true, it enables object level immutability for all + the containers in the account by default. 
+ :type immutable_storage_with_versioning: + ~azure.mgmt.storage.v2021_06_01.models.ImmutableStorageAccount + """ + + _attribute_map = { + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'identity': {'key': 'identity', 'type': 'Identity'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'custom_domain': {'key': 'properties.customDomain', 'type': 'CustomDomain'}, + 'encryption': {'key': 'properties.encryption', 'type': 'Encryption'}, + 'sas_policy': {'key': 'properties.sasPolicy', 'type': 'SasPolicy'}, + 'key_policy': {'key': 'properties.keyPolicy', 'type': 'KeyPolicy'}, + 'access_tier': {'key': 'properties.accessTier', 'type': 'str'}, + 'azure_files_identity_based_authentication': {'key': 'properties.azureFilesIdentityBasedAuthentication', 'type': 'AzureFilesIdentityBasedAuthentication'}, + 'enable_https_traffic_only': {'key': 'properties.supportsHttpsTrafficOnly', 'type': 'bool'}, + 'network_rule_set': {'key': 'properties.networkAcls', 'type': 'NetworkRuleSet'}, + 'large_file_shares_state': {'key': 'properties.largeFileSharesState', 'type': 'str'}, + 'routing_preference': {'key': 'properties.routingPreference', 'type': 'RoutingPreference'}, + 'allow_blob_public_access': {'key': 'properties.allowBlobPublicAccess', 'type': 'bool'}, + 'minimum_tls_version': {'key': 'properties.minimumTlsVersion', 'type': 'str'}, + 'allow_shared_key_access': {'key': 'properties.allowSharedKeyAccess', 'type': 'bool'}, + 'allow_cross_tenant_replication': {'key': 'properties.allowCrossTenantReplication', 'type': 'bool'}, + 'default_to_o_auth_authentication': {'key': 'properties.defaultToOAuthAuthentication', 'type': 'bool'}, + 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, + 'immutable_storage_with_versioning': {'key': 'properties.immutableStorageWithVersioning', 'type': 'ImmutableStorageAccount'}, + } + + def __init__( + self, + *, + sku: Optional["Sku"] = None, + tags: Optional[Dict[str, str]] = None, + identity: Optional["Identity"] = None, + kind: Optional[Union[str, "Kind"]] = None, + custom_domain: Optional["CustomDomain"] = None, + encryption: Optional["Encryption"] = None, + sas_policy: Optional["SasPolicy"] = None, + key_policy: Optional["KeyPolicy"] = None, + access_tier: Optional[Union[str, "AccessTier"]] = None, + azure_files_identity_based_authentication: Optional["AzureFilesIdentityBasedAuthentication"] = None, + enable_https_traffic_only: Optional[bool] = None, + network_rule_set: Optional["NetworkRuleSet"] = None, + large_file_shares_state: Optional[Union[str, "LargeFileSharesState"]] = None, + routing_preference: Optional["RoutingPreference"] = None, + allow_blob_public_access: Optional[bool] = None, + minimum_tls_version: Optional[Union[str, "MinimumTlsVersion"]] = None, + allow_shared_key_access: Optional[bool] = None, + allow_cross_tenant_replication: Optional[bool] = None, + default_to_o_auth_authentication: Optional[bool] = None, + public_network_access: Optional[Union[str, "PublicNetworkAccess"]] = None, + immutable_storage_with_versioning: Optional["ImmutableStorageAccount"] = None, + **kwargs + ): + super(StorageAccountUpdateParameters, self).__init__(**kwargs) + self.sku = sku + self.tags = tags + self.identity = identity + self.kind = kind + self.custom_domain = custom_domain + self.encryption = encryption + self.sas_policy = sas_policy + self.key_policy = key_policy + self.access_tier = access_tier + self.azure_files_identity_based_authentication = azure_files_identity_based_authentication + 
self.enable_https_traffic_only = enable_https_traffic_only + self.network_rule_set = network_rule_set + self.large_file_shares_state = large_file_shares_state + self.routing_preference = routing_preference + self.allow_blob_public_access = allow_blob_public_access + self.minimum_tls_version = minimum_tls_version + self.allow_shared_key_access = allow_shared_key_access + self.allow_cross_tenant_replication = allow_cross_tenant_replication + self.default_to_o_auth_authentication = default_to_o_auth_authentication + self.public_network_access = public_network_access + self.immutable_storage_with_versioning = immutable_storage_with_versioning + + +class StorageQueue(Resource): + """StorageQueue. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :param metadata: A name-value pair that represents queue metadata. + :type metadata: dict[str, str] + :ivar approximate_message_count: Integer indicating an approximate number of messages in the + queue. This number is not lower than the actual number of messages in the queue, but could be + higher. + :vartype approximate_message_count: int + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'approximate_message_count': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'metadata': {'key': 'properties.metadata', 'type': '{str}'}, + 'approximate_message_count': {'key': 'properties.approximateMessageCount', 'type': 'int'}, + } + + def __init__( + self, + *, + metadata: Optional[Dict[str, str]] = None, + **kwargs + ): + super(StorageQueue, self).__init__(**kwargs) + self.metadata = metadata + self.approximate_message_count = None + + +class StorageSkuListResult(msrest.serialization.Model): + """The response from the List Storage SKUs operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: Get the list result of storage SKUs and their properties. + :vartype value: list[~azure.mgmt.storage.v2021_06_01.models.SkuInformation] + """ + + _validation = { + 'value': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[SkuInformation]'}, + } + + def __init__( + self, + **kwargs + ): + super(StorageSkuListResult, self).__init__(**kwargs) + self.value = None + + +class SystemData(msrest.serialization.Model): + """Metadata pertaining to creation and last modification of the resource. + + :param created_by: The identity that created the resource. + :type created_by: str + :param created_by_type: The type of identity that created the resource. Possible values + include: "User", "Application", "ManagedIdentity", "Key". + :type created_by_type: str or ~azure.mgmt.storage.v2021_06_01.models.CreatedByType + :param created_at: The timestamp of resource creation (UTC). + :type created_at: ~datetime.datetime + :param last_modified_by: The identity that last modified the resource. 
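
`StorageAccountUpdateParameters`, completed just above, is the body of the non-LRO `update` operation, so only the properties being changed need to be set. Another hedged sketch with placeholder names:

```py
from azure.identity import DefaultAzureCredential
from azure.mgmt.storage import StorageManagementClient
from azure.mgmt.storage.v2021_06_01.models import StorageAccountUpdateParameters

client = StorageManagementClient(DefaultAzureCredential(), "<subscription-id>")

# Attributes left as None are omitted from the request body.
updated = client.storage_accounts.update(
    "<resource-group>",
    "<account-name>",
    StorageAccountUpdateParameters(
        tags={"env": "test"},
        allow_blob_public_access=False,
        minimum_tls_version="TLS1_2",
    ),
)
print(updated.tags, updated.minimum_tls_version)
```
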
+ :type last_modified_by: str + :param last_modified_by_type: The type of identity that last modified the resource. Possible + values include: "User", "Application", "ManagedIdentity", "Key". + :type last_modified_by_type: str or ~azure.mgmt.storage.v2021_06_01.models.CreatedByType + :param last_modified_at: The timestamp of resource last modification (UTC). + :type last_modified_at: ~datetime.datetime + """ + + _attribute_map = { + 'created_by': {'key': 'createdBy', 'type': 'str'}, + 'created_by_type': {'key': 'createdByType', 'type': 'str'}, + 'created_at': {'key': 'createdAt', 'type': 'iso-8601'}, + 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'}, + 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'}, + 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'}, + } + + def __init__( + self, + *, + created_by: Optional[str] = None, + created_by_type: Optional[Union[str, "CreatedByType"]] = None, + created_at: Optional[datetime.datetime] = None, + last_modified_by: Optional[str] = None, + last_modified_by_type: Optional[Union[str, "CreatedByType"]] = None, + last_modified_at: Optional[datetime.datetime] = None, + **kwargs + ): + super(SystemData, self).__init__(**kwargs) + self.created_by = created_by + self.created_by_type = created_by_type + self.created_at = created_at + self.last_modified_by = last_modified_by + self.last_modified_by_type = last_modified_by_type + self.last_modified_at = last_modified_at + + +class Table(Resource): + """Properties of the table, including Id, resource name, resource type. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar table_name: Table name under the specified account. + :vartype table_name: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'table_name': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'properties.tableName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Table, self).__init__(**kwargs) + self.table_name = None + + +class TableServiceProperties(Resource): + """The properties of a storage account’s Table service. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :param cors: Specifies CORS rules for the Table service. You can include up to five CorsRule + elements in the request. If no CorsRule elements are included in the request body, all CORS + rules will be deleted, and CORS will be disabled for the Table service. 
+ :type cors: ~azure.mgmt.storage.v2021_06_01.models.CorsRules + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'cors': {'key': 'properties.cors', 'type': 'CorsRules'}, + } + + def __init__( + self, + *, + cors: Optional["CorsRules"] = None, + **kwargs + ): + super(TableServiceProperties, self).__init__(**kwargs) + self.cors = cors + + +class TagFilter(msrest.serialization.Model): + """Blob index tag based filtering for blob objects. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. This is the filter tag name, it can have 1 - 128 characters. + :type name: str + :param op: Required. This is the comparison operator which is used for object comparison and + filtering. Only == (equality operator) is currently supported. + :type op: str + :param value: Required. This is the filter tag value field used for tag based filtering, it can + have 0 - 256 characters. + :type value: str + """ + + _validation = { + 'name': {'required': True, 'max_length': 128, 'min_length': 1}, + 'op': {'required': True}, + 'value': {'required': True, 'max_length': 256, 'min_length': 0}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'op': {'key': 'op', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__( + self, + *, + name: str, + op: str, + value: str, + **kwargs + ): + super(TagFilter, self).__init__(**kwargs) + self.name = name + self.op = op + self.value = value + + +class TagProperty(msrest.serialization.Model): + """A tag of the LegalHold of a blob container. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar tag: The tag value. + :vartype tag: str + :ivar timestamp: Returns the date and time the tag was added. + :vartype timestamp: ~datetime.datetime + :ivar object_identifier: Returns the Object ID of the user who added the tag. + :vartype object_identifier: str + :ivar tenant_id: Returns the Tenant ID that issued the token for the user who added the tag. + :vartype tenant_id: str + :ivar upn: Returns the User Principal Name of the user who added the tag. + :vartype upn: str + """ + + _validation = { + 'tag': {'readonly': True}, + 'timestamp': {'readonly': True}, + 'object_identifier': {'readonly': True}, + 'tenant_id': {'readonly': True}, + 'upn': {'readonly': True}, + } + + _attribute_map = { + 'tag': {'key': 'tag', 'type': 'str'}, + 'timestamp': {'key': 'timestamp', 'type': 'iso-8601'}, + 'object_identifier': {'key': 'objectIdentifier', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'upn': {'key': 'upn', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(TagProperty, self).__init__(**kwargs) + self.tag = None + self.timestamp = None + self.object_identifier = None + self.tenant_id = None + self.upn = None + + +class UpdateHistoryProperty(msrest.serialization.Model): + """An update history of the ImmutabilityPolicy of a blob container. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar update: The ImmutabilityPolicy update type of a blob container, possible values include: + put, lock and extend. Possible values include: "put", "lock", "extend". 
+ :vartype update: str or ~azure.mgmt.storage.v2021_06_01.models.ImmutabilityPolicyUpdateType + :ivar immutability_period_since_creation_in_days: The immutability period for the blobs in the + container since the policy creation, in days. + :vartype immutability_period_since_creation_in_days: int + :ivar timestamp: Returns the date and time the ImmutabilityPolicy was updated. + :vartype timestamp: ~datetime.datetime + :ivar object_identifier: Returns the Object ID of the user who updated the ImmutabilityPolicy. + :vartype object_identifier: str + :ivar tenant_id: Returns the Tenant ID that issued the token for the user who updated the + ImmutabilityPolicy. + :vartype tenant_id: str + :ivar upn: Returns the User Principal Name of the user who updated the ImmutabilityPolicy. + :vartype upn: str + :param allow_protected_append_writes: This property can only be changed for unlocked time-based + retention policies. When enabled, new blocks can be written to an append blob while maintaining + immutability protection and compliance. Only new blocks can be added and any existing blocks + cannot be modified or deleted. This property cannot be changed with ExtendImmutabilityPolicy + API. + :type allow_protected_append_writes: bool + :param allow_protected_append_writes_all: This property can only be changed for unlocked + time-based retention policies. When enabled, new blocks can be written to both 'Append and Bock + Blobs' while maintaining immutability protection and compliance. Only new blocks can be added + and any existing blocks cannot be modified or deleted. This property cannot be changed with + ExtendImmutabilityPolicy API. The 'allowProtectedAppendWrites' and + 'allowProtectedAppendWritesAll' properties are mutually exclusive. + :type allow_protected_append_writes_all: bool + """ + + _validation = { + 'update': {'readonly': True}, + 'immutability_period_since_creation_in_days': {'readonly': True}, + 'timestamp': {'readonly': True}, + 'object_identifier': {'readonly': True}, + 'tenant_id': {'readonly': True}, + 'upn': {'readonly': True}, + } + + _attribute_map = { + 'update': {'key': 'update', 'type': 'str'}, + 'immutability_period_since_creation_in_days': {'key': 'immutabilityPeriodSinceCreationInDays', 'type': 'int'}, + 'timestamp': {'key': 'timestamp', 'type': 'iso-8601'}, + 'object_identifier': {'key': 'objectIdentifier', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'upn': {'key': 'upn', 'type': 'str'}, + 'allow_protected_append_writes': {'key': 'allowProtectedAppendWrites', 'type': 'bool'}, + 'allow_protected_append_writes_all': {'key': 'allowProtectedAppendWritesAll', 'type': 'bool'}, + } + + def __init__( + self, + *, + allow_protected_append_writes: Optional[bool] = None, + allow_protected_append_writes_all: Optional[bool] = None, + **kwargs + ): + super(UpdateHistoryProperty, self).__init__(**kwargs) + self.update = None + self.immutability_period_since_creation_in_days = None + self.timestamp = None + self.object_identifier = None + self.tenant_id = None + self.upn = None + self.allow_protected_append_writes = allow_protected_append_writes + self.allow_protected_append_writes_all = allow_protected_append_writes_all + + +class Usage(msrest.serialization.Model): + """Describes Storage Resource Usage. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar unit: Gets the unit of measurement. Possible values include: "Count", "Bytes", "Seconds", + "Percent", "CountsPerSecond", "BytesPerSecond". 
+ :vartype unit: str or ~azure.mgmt.storage.v2021_06_01.models.UsageUnit + :ivar current_value: Gets the current count of the allocated resources in the subscription. + :vartype current_value: int + :ivar limit: Gets the maximum count of the resources that can be allocated in the subscription. + :vartype limit: int + :ivar name: Gets the name of the type of usage. + :vartype name: ~azure.mgmt.storage.v2021_06_01.models.UsageName + """ + + _validation = { + 'unit': {'readonly': True}, + 'current_value': {'readonly': True}, + 'limit': {'readonly': True}, + 'name': {'readonly': True}, + } + + _attribute_map = { + 'unit': {'key': 'unit', 'type': 'str'}, + 'current_value': {'key': 'currentValue', 'type': 'int'}, + 'limit': {'key': 'limit', 'type': 'int'}, + 'name': {'key': 'name', 'type': 'UsageName'}, + } + + def __init__( + self, + **kwargs + ): + super(Usage, self).__init__(**kwargs) + self.unit = None + self.current_value = None + self.limit = None + self.name = None + + +class UsageListResult(msrest.serialization.Model): + """The response from the List Usages operation. + + :param value: Gets or sets the list of Storage Resource Usages. + :type value: list[~azure.mgmt.storage.v2021_06_01.models.Usage] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Usage]'}, + } + + def __init__( + self, + *, + value: Optional[List["Usage"]] = None, + **kwargs + ): + super(UsageListResult, self).__init__(**kwargs) + self.value = value + + +class UsageName(msrest.serialization.Model): + """The usage names that can be used; currently limited to StorageAccount. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: Gets a string describing the resource name. + :vartype value: str + :ivar localized_value: Gets a localized string describing the resource name. + :vartype localized_value: str + """ + + _validation = { + 'value': {'readonly': True}, + 'localized_value': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': 'str'}, + 'localized_value': {'key': 'localizedValue', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(UsageName, self).__init__(**kwargs) + self.value = None + self.localized_value = None + + +class UserAssignedIdentity(msrest.serialization.Model): + """UserAssignedIdentity for the resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar principal_id: The principal ID of the identity. + :vartype principal_id: str + :ivar client_id: The client ID of the identity. + :vartype client_id: str + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'client_id': {'readonly': True}, + } + + _attribute_map = { + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'client_id': {'key': 'clientId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(UserAssignedIdentity, self).__init__(**kwargs) + self.principal_id = None + self.client_id = None + + +class VirtualNetworkRule(msrest.serialization.Model): + """Virtual Network rule. + + All required parameters must be populated in order to send to Azure. + + :param virtual_network_resource_id: Required. Resource ID of a subnet, for example: + /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}. + :type virtual_network_resource_id: str + :param action: The action of virtual network rule. The only acceptable values to pass in are + None and "Allow". 
The default value is None. + :type action: str + :param state: Gets the state of virtual network rule. Possible values include: "Provisioning", + "Deprovisioning", "Succeeded", "Failed", "NetworkSourceDeleted". + :type state: str or ~azure.mgmt.storage.v2021_06_01.models.State + """ + + _validation = { + 'virtual_network_resource_id': {'required': True}, + } + + _attribute_map = { + 'virtual_network_resource_id': {'key': 'id', 'type': 'str'}, + 'action': {'key': 'action', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + } + + def __init__( + self, + *, + virtual_network_resource_id: str, + action: Optional[str] = None, + state: Optional[Union[str, "State"]] = None, + **kwargs + ): + super(VirtualNetworkRule, self).__init__(**kwargs) + self.virtual_network_resource_id = virtual_network_resource_id + self.action = action + self.state = state diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/models/_storage_management_client_enums.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/models/_storage_management_client_enums.py new file mode 100644 index 000000000000..fc867e84f062 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/models/_storage_management_client_enums.py @@ -0,0 +1,528 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from enum import Enum, EnumMeta +from six import with_metaclass + +class _CaseInsensitiveEnumMeta(EnumMeta): + def __getitem__(self, name): + return super().__getitem__(name.upper()) + + def __getattr__(cls, name): + """Return the enum member matching `name` + We use __getattr__ instead of descriptors or inserting into the enum + class' __dict__ in order to support `name` and `value` being both + properties for enum members (which live in the class' __dict__) and + enum members themselves. + """ + try: + return cls._member_map_[name.upper()] + except KeyError: + raise AttributeError(name) + + +class AccessTier(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Required for storage accounts where kind = BlobStorage. The access tier used for billing. + """ + + HOT = "Hot" + COOL = "Cool" + +class AccountImmutabilityPolicyState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The ImmutabilityPolicy state defines the mode of the policy. Disabled state disables the + policy, Unlocked state allows increase and decrease of immutability retention time and also + allows toggling allowProtectedAppendWrites property, Locked state only allows the increase of + the immutability retention time. A policy can only be created in a Disabled or Unlocked state + and can be toggled between the two states. Only a policy in an Unlocked state can transition to + a Locked state which cannot be reverted. + """ + + UNLOCKED = "Unlocked" + LOCKED = "Locked" + DISABLED = "Disabled" + +class AccountStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Gets the status indicating whether the primary location of the storage account is available or + unavailable. 
+ """ + + AVAILABLE = "available" + UNAVAILABLE = "unavailable" + +class BlobInventoryPolicyName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + DEFAULT = "default" + +class BlobRestoreProgressStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The status of blob restore progress. Possible values are: - InProgress: Indicates that blob + restore is ongoing. - Complete: Indicates that blob restore has been completed successfully. - + Failed: Indicates that blob restore is failed. + """ + + IN_PROGRESS = "InProgress" + COMPLETE = "Complete" + FAILED = "Failed" + +class Bypass(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Specifies whether traffic is bypassed for Logging/Metrics/AzureServices. Possible values are + any combination of Logging|Metrics|AzureServices (For example, "Logging, Metrics"), or None to + bypass none of those traffics. + """ + + NONE = "None" + LOGGING = "Logging" + METRICS = "Metrics" + AZURE_SERVICES = "AzureServices" + +class CorsRuleAllowedMethodsItem(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + DELETE = "DELETE" + GET = "GET" + HEAD = "HEAD" + MERGE = "MERGE" + POST = "POST" + OPTIONS = "OPTIONS" + PUT = "PUT" + +class CreatedByType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The type of identity that created the resource. + """ + + USER = "User" + APPLICATION = "Application" + MANAGED_IDENTITY = "ManagedIdentity" + KEY = "Key" + +class DefaultAction(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Specifies the default action of allow or deny when no other rules match. + """ + + ALLOW = "Allow" + DENY = "Deny" + +class DefaultSharePermission(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Default share permission for users using Kerberos authentication if RBAC role is not assigned. + """ + + NONE = "None" + STORAGE_FILE_DATA_SMB_SHARE_READER = "StorageFileDataSmbShareReader" + STORAGE_FILE_DATA_SMB_SHARE_CONTRIBUTOR = "StorageFileDataSmbShareContributor" + STORAGE_FILE_DATA_SMB_SHARE_ELEVATED_CONTRIBUTOR = "StorageFileDataSmbShareElevatedContributor" + +class DirectoryServiceOptions(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Indicates the directory service used. + """ + + NONE = "None" + AADDS = "AADDS" + AD = "AD" + +class EnabledProtocols(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The authentication protocol that is used for the file share. Can only be specified when + creating a share. + """ + + SMB = "SMB" + NFS = "NFS" + +class EncryptionScopeSource(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The provider for the encryption scope. Possible values (case-insensitive): Microsoft.Storage, + Microsoft.KeyVault. + """ + + MICROSOFT_STORAGE = "Microsoft.Storage" + MICROSOFT_KEY_VAULT = "Microsoft.KeyVault" + +class EncryptionScopeState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The state of the encryption scope. Possible values (case-insensitive): Enabled, Disabled. + """ + + ENABLED = "Enabled" + DISABLED = "Disabled" + +class ExpirationAction(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The SAS expiration action. Can only be Log. + """ + + LOG = "Log" + +class ExtendedLocationTypes(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The type of extendedLocation. + """ + + EDGE_ZONE = "EdgeZone" + +class Format(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """This is a required field, it specifies the format for the inventory files. 
+ """ + + CSV = "Csv" + PARQUET = "Parquet" + +class GeoReplicationStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The status of the secondary location. Possible values are: - Live: Indicates that the secondary + location is active and operational. - Bootstrap: Indicates initial synchronization from the + primary location to the secondary location is in progress.This typically occurs when + replication is first enabled. - Unavailable: Indicates that the secondary location is + temporarily unavailable. + """ + + LIVE = "Live" + BOOTSTRAP = "Bootstrap" + UNAVAILABLE = "Unavailable" + +class HttpProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The protocol permitted for a request made with the account SAS. + """ + + HTTPS_HTTP = "https,http" + HTTPS = "https" + +class IdentityType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The identity type. + """ + + NONE = "None" + SYSTEM_ASSIGNED = "SystemAssigned" + USER_ASSIGNED = "UserAssigned" + SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned,UserAssigned" + +class ImmutabilityPolicyState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The ImmutabilityPolicy state of a blob container, possible values include: Locked and Unlocked. + """ + + LOCKED = "Locked" + UNLOCKED = "Unlocked" + +class ImmutabilityPolicyUpdateType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The ImmutabilityPolicy update type of a blob container, possible values include: put, lock and + extend. + """ + + PUT = "put" + LOCK = "lock" + EXTEND = "extend" + +class InventoryRuleType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The valid value is Inventory + """ + + INVENTORY = "Inventory" + +class KeyPermission(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Permissions for the key -- read-only or full permissions. + """ + + READ = "Read" + FULL = "Full" + +class KeySource(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The encryption keySource (provider). Possible values (case-insensitive): Microsoft.Storage, + Microsoft.Keyvault + """ + + MICROSOFT_STORAGE = "Microsoft.Storage" + MICROSOFT_KEYVAULT = "Microsoft.Keyvault" + +class KeyType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Encryption key type to be used for the encryption service. 'Account' key type implies that an + account-scoped encryption key will be used. 'Service' key type implies that a default service + key is used. + """ + + SERVICE = "Service" + ACCOUNT = "Account" + +class Kind(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Indicates the type of storage account. + """ + + STORAGE = "Storage" + STORAGE_V2 = "StorageV2" + BLOB_STORAGE = "BlobStorage" + FILE_STORAGE = "FileStorage" + BLOCK_BLOB_STORAGE = "BlockBlobStorage" + +class LargeFileSharesState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Allow large file shares if sets to Enabled. It cannot be disabled once it is enabled. + """ + + DISABLED = "Disabled" + ENABLED = "Enabled" + +class LeaseContainerRequestAction(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Specifies the lease action. Can be one of the available actions. + """ + + ACQUIRE = "Acquire" + RENEW = "Renew" + CHANGE = "Change" + RELEASE = "Release" + BREAK_ENUM = "Break" + +class LeaseDuration(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Specifies whether the lease on a container is of infinite or fixed duration, only when the + container is leased. 
+ """ + + INFINITE = "Infinite" + FIXED = "Fixed" + +class LeaseShareAction(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Specifies the lease action. Can be one of the available actions. + """ + + ACQUIRE = "Acquire" + RENEW = "Renew" + CHANGE = "Change" + RELEASE = "Release" + BREAK_ENUM = "Break" + +class LeaseState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Lease state of the container. + """ + + AVAILABLE = "Available" + LEASED = "Leased" + EXPIRED = "Expired" + BREAKING = "Breaking" + BROKEN = "Broken" + +class LeaseStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The lease status of the container. + """ + + LOCKED = "Locked" + UNLOCKED = "Unlocked" + +class ListContainersInclude(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + DELETED = "deleted" + +class ManagementPolicyName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + DEFAULT = "default" + +class MigrationState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """This property denotes the container level immutability to object level immutability migration + state. + """ + + IN_PROGRESS = "InProgress" + COMPLETED = "Completed" + +class MinimumTlsVersion(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Set the minimum TLS version to be permitted on requests to storage. The default interpretation + is TLS 1.0 for this property. + """ + + TLS1_0 = "TLS1_0" + TLS1_1 = "TLS1_1" + TLS1_2 = "TLS1_2" + +class Name(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Name of the policy. The valid value is AccessTimeTracking. This field is currently read only + """ + + ACCESS_TIME_TRACKING = "AccessTimeTracking" + +class ObjectType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """This is a required field. This field specifies the scope of the inventory created either at the + blob or container level. + """ + + BLOB = "Blob" + CONTAINER = "Container" + +class Permissions(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The signed permissions for the account SAS. Possible values include: Read (r), Write (w), + Delete (d), List (l), Add (a), Create (c), Update (u) and Process (p). + """ + + R = "r" + D = "d" + W = "w" + L = "l" + A = "a" + C = "c" + U = "u" + P = "p" + +class PrivateEndpointConnectionProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The current provisioning state. + """ + + SUCCEEDED = "Succeeded" + CREATING = "Creating" + DELETING = "Deleting" + FAILED = "Failed" + +class PrivateEndpointServiceConnectionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The private endpoint connection status. + """ + + PENDING = "Pending" + APPROVED = "Approved" + REJECTED = "Rejected" + +class ProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Gets the status of the storage account at the time the operation was called. + """ + + CREATING = "Creating" + RESOLVING_DNS = "ResolvingDNS" + SUCCEEDED = "Succeeded" + +class PublicAccess(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Specifies whether data in the container may be accessed publicly and the level of access. + """ + + CONTAINER = "Container" + BLOB = "Blob" + NONE = "None" + +class PublicNetworkAccess(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Allow or disallow public network access to Storage Account. Value is optional but if passed in, + must be 'Enabled' or 'Disabled'. 
+ """ + + ENABLED = "Enabled" + DISABLED = "Disabled" + +class Reason(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Gets the reason that a storage account name could not be used. The Reason element is only + returned if NameAvailable is false. + """ + + ACCOUNT_NAME_INVALID = "AccountNameInvalid" + ALREADY_EXISTS = "AlreadyExists" + +class ReasonCode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The reason for the restriction. As of now this can be "QuotaId" or + "NotAvailableForSubscription". Quota Id is set when the SKU has requiredQuotas parameter as the + subscription does not belong to that quota. The "NotAvailableForSubscription" is related to + capacity at DC. + """ + + QUOTA_ID = "QuotaId" + NOT_AVAILABLE_FOR_SUBSCRIPTION = "NotAvailableForSubscription" + +class RootSquashType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The property is for NFS share only. The default is NoRootSquash. + """ + + NO_ROOT_SQUASH = "NoRootSquash" + ROOT_SQUASH = "RootSquash" + ALL_SQUASH = "AllSquash" + +class RoutingChoice(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Routing Choice defines the kind of network routing opted by the user. + """ + + MICROSOFT_ROUTING = "MicrosoftRouting" + INTERNET_ROUTING = "InternetRouting" + +class RuleType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The valid value is Lifecycle + """ + + LIFECYCLE = "Lifecycle" + +class Schedule(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """This is a required field. This field is used to schedule an inventory formation. + """ + + DAILY = "Daily" + WEEKLY = "Weekly" + +class Services(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The signed services accessible with the account SAS. Possible values include: Blob (b), Queue + (q), Table (t), File (f). + """ + + B = "b" + Q = "q" + T = "t" + F = "f" + +class ShareAccessTier(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Access tier for specific share. GpV2 account can choose between TransactionOptimized (default), + Hot, and Cool. FileStorage account can choose Premium. + """ + + TRANSACTION_OPTIMIZED = "TransactionOptimized" + HOT = "Hot" + COOL = "Cool" + PREMIUM = "Premium" + +class SignedResource(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The signed services accessible with the service SAS. Possible values include: Blob (b), + Container (c), File (f), Share (s). + """ + + B = "b" + C = "c" + F = "f" + S = "s" + +class SignedResourceTypes(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The signed resource types that are accessible with the account SAS. Service (s): Access to + service-level APIs; Container (c): Access to container-level APIs; Object (o): Access to + object-level APIs for blobs, queue messages, table entities, and files. + """ + + S = "s" + C = "c" + O = "o" + +class SkuName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The SKU name. Required for account creation; optional for update. Note that in older versions, + SKU name was called accountType. + """ + + STANDARD_LRS = "Standard_LRS" + STANDARD_GRS = "Standard_GRS" + STANDARD_RAGRS = "Standard_RAGRS" + STANDARD_ZRS = "Standard_ZRS" + PREMIUM_LRS = "Premium_LRS" + PREMIUM_ZRS = "Premium_ZRS" + STANDARD_GZRS = "Standard_GZRS" + STANDARD_RAGZRS = "Standard_RAGZRS" + +class SkuTier(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The SKU tier. This is based on the SKU name. 
+ """ + + STANDARD = "Standard" + PREMIUM = "Premium" + +class State(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Gets the state of virtual network rule. + """ + + PROVISIONING = "Provisioning" + DEPROVISIONING = "Deprovisioning" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + NETWORK_SOURCE_DELETED = "NetworkSourceDeleted" + +class StorageAccountExpand(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + GEO_REPLICATION_STATS = "geoReplicationStats" + BLOB_RESTORE_STATUS = "blobRestoreStatus" + +class UsageUnit(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Gets the unit of measurement. + """ + + COUNT = "Count" + BYTES = "Bytes" + SECONDS = "Seconds" + PERCENT = "Percent" + COUNTS_PER_SECOND = "CountsPerSecond" + BYTES_PER_SECOND = "BytesPerSecond" diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/__init__.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/__init__.py new file mode 100644 index 000000000000..bddcf8c8cb34 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/__init__.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._operations import Operations +from ._skus_operations import SkusOperations +from ._storage_accounts_operations import StorageAccountsOperations +from ._deleted_accounts_operations import DeletedAccountsOperations +from ._usages_operations import UsagesOperations +from ._management_policies_operations import ManagementPoliciesOperations +from ._blob_inventory_policies_operations import BlobInventoryPoliciesOperations +from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations +from ._private_link_resources_operations import PrivateLinkResourcesOperations +from ._object_replication_policies_operations import ObjectReplicationPoliciesOperations +from ._encryption_scopes_operations import EncryptionScopesOperations +from ._blob_services_operations import BlobServicesOperations +from ._blob_containers_operations import BlobContainersOperations +from ._file_services_operations import FileServicesOperations +from ._file_shares_operations import FileSharesOperations +from ._queue_services_operations import QueueServicesOperations +from ._queue_operations import QueueOperations +from ._table_services_operations import TableServicesOperations +from ._table_operations import TableOperations + +__all__ = [ + 'Operations', + 'SkusOperations', + 'StorageAccountsOperations', + 'DeletedAccountsOperations', + 'UsagesOperations', + 'ManagementPoliciesOperations', + 'BlobInventoryPoliciesOperations', + 'PrivateEndpointConnectionsOperations', + 'PrivateLinkResourcesOperations', + 'ObjectReplicationPoliciesOperations', + 'EncryptionScopesOperations', + 'BlobServicesOperations', + 'BlobContainersOperations', + 'FileServicesOperations', + 'FileSharesOperations', + 'QueueServicesOperations', + 'QueueOperations', + 'TableServicesOperations', + 'TableOperations', +] diff --git 
a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_blob_containers_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_blob_containers_operations.py new file mode 100644 index 000000000000..c992b0cbd435 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_blob_containers_operations.py @@ -0,0 +1,1227 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class BlobContainersOperations(object): + """BlobContainersOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + resource_group_name, # type: str + account_name, # type: str + maxpagesize=None, # type: Optional[str] + filter=None, # type: Optional[str] + include=None, # type: Optional[Union[str, "_models.ListContainersInclude"]] + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.ListContainerItems"] + """Lists all containers and does not support a prefix like data plane. Also SRP today does not + return continuation token. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param maxpagesize: Optional. Specified maximum number of containers that can be included in + the list. + :type maxpagesize: str + :param filter: Optional. 
When specified, only container names starting with the filter will be + listed. + :type filter: str + :param include: Optional, used to include the properties for soft deleted blob containers. + :type include: str or ~azure.mgmt.storage.v2021_06_01.models.ListContainersInclude + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ListContainerItems or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.v2021_06_01.models.ListContainerItems] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListContainerItems"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'str') + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + if include is not None: + query_parameters['$include'] = self._serialize.query("include", include, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('ListContainerItems', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers'} # type: ignore + + def create( + self, + resource_group_name, # type: str + account_name, # type: str + container_name, # type: str + blob_container, # type: "_models.BlobContainer" + **kwargs # type: Any + ): + # type: 
(...) -> "_models.BlobContainer" + """Creates a new container under the specified account as described by request body. The container + resource includes metadata and properties for that container. It does not include a list of the + blobs contained by the container. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. + :type container_name: str + :param blob_container: Properties of the blob container to create. + :type blob_container: ~azure.mgmt.storage.v2021_06_01.models.BlobContainer + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BlobContainer, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.BlobContainer + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobContainer"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(blob_container, 'BlobContainer') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('BlobContainer', pipeline_response) + + if 
response.status_code == 201: + deserialized = self._deserialize('BlobContainer', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}'} # type: ignore + + def update( + self, + resource_group_name, # type: str + account_name, # type: str + container_name, # type: str + blob_container, # type: "_models.BlobContainer" + **kwargs # type: Any + ): + # type: (...) -> "_models.BlobContainer" + """Updates container properties as specified in request body. Properties not mentioned in the + request will be unchanged. Update fails if the specified container doesn't already exist. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. + :type container_name: str + :param blob_container: Properties to update for the blob container. + :type blob_container: ~azure.mgmt.storage.v2021_06_01.models.BlobContainer + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BlobContainer, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.BlobContainer + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobContainer"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(blob_container, 'BlobContainer') + 
body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('BlobContainer', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + account_name, # type: str + container_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.BlobContainer" + """Gets properties of a specified container. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. 
+ :type container_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BlobContainer, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.BlobContainer + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobContainer"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('BlobContainer', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + account_name, # type: str + container_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes specified container under its account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. 
+ :type container_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}'} # type: ignore + + def set_legal_hold( + self, + resource_group_name, # type: str + account_name, # type: str + container_name, # type: str + legal_hold, # type: "_models.LegalHold" + **kwargs # type: Any + ): + # type: (...) -> "_models.LegalHold" + """Sets legal hold tags. Setting the same tag results in an idempotent operation. SetLegalHold + follows an append pattern and does not clear out the existing tags that are not specified in + the request. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. + :type container_name: str + :param legal_hold: The LegalHold property that will be set to a blob container. 
+ :type legal_hold: ~azure.mgmt.storage.v2021_06_01.models.LegalHold + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LegalHold, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.LegalHold + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.LegalHold"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.set_legal_hold.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(legal_hold, 'LegalHold') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('LegalHold', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + set_legal_hold.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/setLegalHold'} # type: ignore + + def clear_legal_hold( + self, + resource_group_name, # type: str + account_name, # type: str + container_name, # type: str + legal_hold, # type: "_models.LegalHold" + **kwargs # type: Any + ): + # type: (...) -> "_models.LegalHold" + """Clears legal hold tags. Clearing the same or non-existent tag results in an idempotent + operation. ClearLegalHold clears out only the specified tags in the request. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. 
+ :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. + :type container_name: str + :param legal_hold: The LegalHold property that will be clear from a blob container. + :type legal_hold: ~azure.mgmt.storage.v2021_06_01.models.LegalHold + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LegalHold, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.LegalHold + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.LegalHold"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.clear_legal_hold.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(legal_hold, 'LegalHold') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('LegalHold', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + clear_legal_hold.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/clearLegalHold'} # type: ignore + + def create_or_update_immutability_policy( + self, + resource_group_name, # type: str + account_name, # type: str + container_name, # type: str + if_match=None, # type: Optional[str] + parameters=None, # type: Optional["_models.ImmutabilityPolicy"] + **kwargs # type: Any + ): + # type: (...) 
-> "_models.ImmutabilityPolicy" + """Creates or updates an unlocked immutability policy. ETag in If-Match is honored if given but + not required for this operation. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. + :type container_name: str + :param if_match: The entity state (ETag) version of the immutability policy to update. A value + of "*" can be used to apply the operation only if the immutability policy already exists. If + omitted, this operation will always be applied. + :type if_match: str + :param parameters: The ImmutabilityPolicy Properties that will be created or updated to a blob + container. + :type parameters: ~azure.mgmt.storage.v2021_06_01.models.ImmutabilityPolicy + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ImmutabilityPolicy, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.ImmutabilityPolicy + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ImmutabilityPolicy"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + immutability_policy_name = "default" + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update_immutability_policy.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3), + 'immutabilityPolicyName': self._serialize.url("immutability_policy_name", immutability_policy_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if parameters is not None: + body_content = self._serialize.body(parameters, 'ImmutabilityPolicy') + else: + body_content = None + 
body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('ImmutabilityPolicy', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + create_or_update_immutability_policy.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/{immutabilityPolicyName}'} # type: ignore + + def get_immutability_policy( + self, + resource_group_name, # type: str + account_name, # type: str + container_name, # type: str + if_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "_models.ImmutabilityPolicy" + """Gets the existing immutability policy along with the corresponding ETag in response headers and + body. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. + :type container_name: str + :param if_match: The entity state (ETag) version of the immutability policy to update. A value + of "*" can be used to apply the operation only if the immutability policy already exists. If + omitted, this operation will always be applied. 
+ :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ImmutabilityPolicy, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.ImmutabilityPolicy + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ImmutabilityPolicy"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + immutability_policy_name = "default" + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.get_immutability_policy.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3), + 'immutabilityPolicyName': self._serialize.url("immutability_policy_name", immutability_policy_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('ImmutabilityPolicy', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get_immutability_policy.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/{immutabilityPolicyName}'} # type: ignore + + def delete_immutability_policy( + self, + resource_group_name, # type: str + account_name, # type: str + container_name, # type: str + if_match, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.ImmutabilityPolicy" + """Aborts an unlocked immutability policy. The response of delete has + immutabilityPeriodSinceCreationInDays set to 0. ETag in If-Match is required for this + operation. Deleting a locked immutability policy is not allowed, the only way is to delete the + container after deleting all expired blobs inside the policy locked container. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. + :type container_name: str + :param if_match: The entity state (ETag) version of the immutability policy to update. A value + of "*" can be used to apply the operation only if the immutability policy already exists. If + omitted, this operation will always be applied. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ImmutabilityPolicy, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.ImmutabilityPolicy + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ImmutabilityPolicy"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + immutability_policy_name = "default" + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.delete_immutability_policy.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3), + 'immutabilityPolicyName': self._serialize.url("immutability_policy_name", immutability_policy_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('ImmutabilityPolicy', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + delete_immutability_policy.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/{immutabilityPolicyName}'} # type: ignore + + def lock_immutability_policy( + self, + resource_group_name, # type: str + account_name, # type: str + container_name, # type: str + if_match, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.ImmutabilityPolicy" + """Sets the ImmutabilityPolicy to Locked state. The only action allowed on a Locked policy is + ExtendImmutabilityPolicy action. ETag in If-Match is required for this operation. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. + :type container_name: str + :param if_match: The entity state (ETag) version of the immutability policy to update. A value + of "*" can be used to apply the operation only if the immutability policy already exists. If + omitted, this operation will always be applied. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ImmutabilityPolicy, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.ImmutabilityPolicy + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ImmutabilityPolicy"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.lock_immutability_policy.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('ImmutabilityPolicy', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + lock_immutability_policy.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/default/lock'} # type: ignore + + def extend_immutability_policy( + self, + resource_group_name, # type: str + account_name, # type: str + container_name, # type: str + if_match, # type: str + parameters=None, # type: Optional["_models.ImmutabilityPolicy"] + **kwargs # type: Any + ): + # type: (...) -> "_models.ImmutabilityPolicy" + """Extends the immutabilityPeriodSinceCreationInDays of a locked immutabilityPolicy. The only + action allowed on a Locked policy will be this action. ETag in If-Match is required for this + operation. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. + :type container_name: str + :param if_match: The entity state (ETag) version of the immutability policy to update. A value + of "*" can be used to apply the operation only if the immutability policy already exists. If + omitted, this operation will always be applied. + :type if_match: str + :param parameters: The ImmutabilityPolicy Properties that will be extended for a blob + container. 
+ :type parameters: ~azure.mgmt.storage.v2021_06_01.models.ImmutabilityPolicy + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ImmutabilityPolicy, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.ImmutabilityPolicy + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ImmutabilityPolicy"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.extend_immutability_policy.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if parameters is not None: + body_content = self._serialize.body(parameters, 'ImmutabilityPolicy') + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('ImmutabilityPolicy', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + extend_immutability_policy.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/default/extend'} # type: ignore + + def lease( + self, + resource_group_name, # type: str + account_name, # type: str + container_name, # type: str + parameters=None, # type: Optional["_models.LeaseContainerRequest"] + **kwargs # type: Any + ): + # type: (...) -> "_models.LeaseContainerResponse" + """The Lease Container operation establishes and manages a lock on a container for delete + operations. The lock duration can be 15 to 60 seconds, or can be infinite. 
+ + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. Every dash (-) character must be immediately preceded and followed + by a letter or number. + :type container_name: str + :param parameters: Lease Container request body. + :type parameters: ~azure.mgmt.storage.v2021_06_01.models.LeaseContainerRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LeaseContainerResponse, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.LeaseContainerResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.LeaseContainerResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.lease.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if parameters is not None: + body_content = self._serialize.body(parameters, 'LeaseContainerRequest') + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('LeaseContainerResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + lease.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/lease'} # type: ignore + + def _object_level_worm_initial( + self, + resource_group_name, # type: str + account_name, # type: str + container_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self._object_level_worm_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _object_level_worm_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/migrate'} # type: ignore + + def begin_object_level_worm( + self, + resource_group_name, # type: str + account_name, # type: str + container_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """This operation migrates a blob container from container level WORM to object level immutability + enabled container. Prerequisites require a container level immutability policy either in locked + or unlocked state, Account level versioning must be enabled and there should be no Legal hold + on the container. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param container_name: The name of the blob container within the specified storage account. + Blob container names must be between 3 and 63 characters in length and use numbers, lower-case + letters and dash (-) only. 
Every dash (-) character must be immediately preceded and followed + by a letter or number. + :type container_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._object_level_worm_initial( + resource_group_name=resource_group_name, + account_name=account_name, + container_name=container_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'containerName': self._serialize.url("container_name", container_name, 'str', max_length=63, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + + if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_object_level_worm.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/migrate'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_blob_inventory_policies_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_blob_inventory_policies_operations.py new file mode 100644 index 000000000000..a4a20167ddfc --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_blob_inventory_policies_operations.py @@ -0,0 +1,334 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class BlobInventoryPoliciesOperations(object): + """BlobInventoryPoliciesOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def get( + self, + resource_group_name, # type: str + account_name, # type: str + blob_inventory_policy_name, # type: Union[str, "_models.BlobInventoryPolicyName"] + **kwargs # type: Any + ): + # type: (...) -> "_models.BlobInventoryPolicy" + """Gets the blob inventory policy associated with the specified storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param blob_inventory_policy_name: The name of the storage account blob inventory policy. It + should always be 'default'. 
+ :type blob_inventory_policy_name: str or ~azure.mgmt.storage.v2021_06_01.models.BlobInventoryPolicyName + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BlobInventoryPolicy, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.BlobInventoryPolicy + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobInventoryPolicy"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'blobInventoryPolicyName': self._serialize.url("blob_inventory_policy_name", blob_inventory_policy_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('BlobInventoryPolicy', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/inventoryPolicies/{blobInventoryPolicyName}'} # type: ignore + + def create_or_update( + self, + resource_group_name, # type: str + account_name, # type: str + blob_inventory_policy_name, # type: Union[str, "_models.BlobInventoryPolicyName"] + properties, # type: "_models.BlobInventoryPolicy" + **kwargs # type: Any + ): + # type: (...) -> "_models.BlobInventoryPolicy" + """Sets the blob inventory policy to the specified storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param blob_inventory_policy_name: The name of the storage account blob inventory policy. It + should always be 'default'. 
+ :type blob_inventory_policy_name: str or ~azure.mgmt.storage.v2021_06_01.models.BlobInventoryPolicyName + :param properties: The blob inventory policy set to a storage account. + :type properties: ~azure.mgmt.storage.v2021_06_01.models.BlobInventoryPolicy + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BlobInventoryPolicy, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.BlobInventoryPolicy + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobInventoryPolicy"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'blobInventoryPolicyName': self._serialize.url("blob_inventory_policy_name", blob_inventory_policy_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(properties, 'BlobInventoryPolicy') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('BlobInventoryPolicy', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/inventoryPolicies/{blobInventoryPolicyName}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + account_name, # type: str + blob_inventory_policy_name, # type: Union[str, "_models.BlobInventoryPolicyName"] + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes the blob inventory policy associated with the specified storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param blob_inventory_policy_name: The name of the storage account blob inventory policy. It + should always be 'default'. + :type blob_inventory_policy_name: str or ~azure.mgmt.storage.v2021_06_01.models.BlobInventoryPolicyName + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'blobInventoryPolicyName': self._serialize.url("blob_inventory_policy_name", blob_inventory_policy_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/inventoryPolicies/{blobInventoryPolicyName}'} # type: ignore + + def list( + self, + resource_group_name, # type: str + account_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.ListBlobInventoryPolicy"] + """Gets the blob inventory policy associated with the specified storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. 
+ :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ListBlobInventoryPolicy or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.v2021_06_01.models.ListBlobInventoryPolicy] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListBlobInventoryPolicy"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('ListBlobInventoryPolicy', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/inventoryPolicies'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_blob_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_blob_services_operations.py new file mode 100644 index 000000000000..9169cd28d4a6 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_blob_services_operations.py @@ -0,0 +1,263 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class BlobServicesOperations(object): + """BlobServicesOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + resource_group_name, # type: str + account_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.BlobServiceItems"] + """List blob services of storage account. It returns a collection of one object named default. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. 
+ :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either BlobServiceItems or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.v2021_06_01.models.BlobServiceItems] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobServiceItems"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('BlobServiceItems', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices'} # type: ignore + + def set_service_properties( + self, + resource_group_name, # type: str + account_name, # type: str + parameters, # type: "_models.BlobServiceProperties" + **kwargs # type: Any + ): + # type: (...) -> "_models.BlobServiceProperties" + """Sets the properties of a storage account’s Blob service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. 
+ :type account_name: str + :param parameters: The properties of a storage account’s Blob service, including properties for + Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. + :type parameters: ~azure.mgmt.storage.v2021_06_01.models.BlobServiceProperties + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BlobServiceProperties, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.BlobServiceProperties + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobServiceProperties"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + blob_services_name = "default" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.set_service_properties.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'BlobServicesName': self._serialize.url("blob_services_name", blob_services_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'BlobServiceProperties') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('BlobServiceProperties', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + set_service_properties.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/{BlobServicesName}'} # type: ignore + + def get_service_properties( + self, + resource_group_name, # type: str + account_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.BlobServiceProperties" + """Gets the properties of a storage account’s Blob service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BlobServiceProperties, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.BlobServiceProperties + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobServiceProperties"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + blob_services_name = "default" + accept = "application/json" + + # Construct URL + url = self.get_service_properties.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'BlobServicesName': self._serialize.url("blob_services_name", blob_services_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('BlobServiceProperties', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_service_properties.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/{BlobServicesName}'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_deleted_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_deleted_accounts_operations.py new file mode 100644 index 000000000000..24e0c28d0422 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_deleted_accounts_operations.py @@ -0,0 +1,174 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class DeletedAccountsOperations(object): + """DeletedAccountsOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.DeletedAccountListResult"] + """Lists deleted accounts under the subscription. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either DeletedAccountListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.v2021_06_01.models.DeletedAccountListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.DeletedAccountListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('DeletedAccountListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) 
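+        # Note: azure.core.paging.ItemPaged (constructed below) drives these two
+        # callbacks -- extract_data returns a (continuation_token, iterator) pair and
+        # get_next fetches the page for a given continuation token, so iteration
+        # stops once next_link is None.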
+ + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Storage/deletedAccounts'} # type: ignore + + def get( + self, + deleted_account_name, # type: str + location, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.DeletedAccount" + """Get properties of specified deleted account resource. + + :param deleted_account_name: Name of the deleted storage account. + :type deleted_account_name: str + :param location: The location of the deleted storage account. + :type location: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DeletedAccount, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.DeletedAccount + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.DeletedAccount"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'deletedAccountName': self._serialize.url("deleted_account_name", deleted_account_name, 'str', max_length=24, min_length=3), + 'location': self._serialize.url("location", location, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('DeletedAccount', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Storage/locations/{location}/deletedAccounts/{deletedAccountName}'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_encryption_scopes_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_encryption_scopes_operations.py new file mode 100644 index 000000000000..ffa54c69dd19 
--- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_encryption_scopes_operations.py @@ -0,0 +1,357 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class EncryptionScopesOperations(object): + """EncryptionScopesOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def put( + self, + resource_group_name, # type: str + account_name, # type: str + encryption_scope_name, # type: str + encryption_scope, # type: "_models.EncryptionScope" + **kwargs # type: Any + ): + # type: (...) -> "_models.EncryptionScope" + """Synchronously creates or updates an encryption scope under the specified storage account. If an + encryption scope is already created and a subsequent request is issued with different + properties, the encryption scope properties will be updated per the specified request. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param encryption_scope_name: The name of the encryption scope within the specified storage + account. Encryption scope names must be between 3 and 63 characters in length and use numbers, + lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and + followed by a letter or number. + :type encryption_scope_name: str + :param encryption_scope: Encryption scope properties to be used for the create or update. 
+ :type encryption_scope: ~azure.mgmt.storage.v2021_06_01.models.EncryptionScope + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EncryptionScope, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.EncryptionScope + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.EncryptionScope"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.put.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'encryptionScopeName': self._serialize.url("encryption_scope_name", encryption_scope_name, 'str', max_length=63, min_length=3), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(encryption_scope, 'EncryptionScope') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('EncryptionScope', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('EncryptionScope', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + put.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/encryptionScopes/{encryptionScopeName}'} # type: ignore + + def patch( + self, + resource_group_name, # type: str + account_name, # type: str + encryption_scope_name, # type: str + encryption_scope, # type: "_models.EncryptionScope" + **kwargs # type: Any + ): + # type: (...) -> "_models.EncryptionScope" + """Update encryption scope properties as specified in the request body. Update fails if the + specified encryption scope does not already exist. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. 
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param encryption_scope_name: The name of the encryption scope within the specified storage + account. Encryption scope names must be between 3 and 63 characters in length and use numbers, + lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and + followed by a letter or number. + :type encryption_scope_name: str + :param encryption_scope: Encryption scope properties to be used for the update. + :type encryption_scope: ~azure.mgmt.storage.v2021_06_01.models.EncryptionScope + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EncryptionScope, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.EncryptionScope + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.EncryptionScope"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.patch.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'encryptionScopeName': self._serialize.url("encryption_scope_name", encryption_scope_name, 'str', max_length=63, min_length=3), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(encryption_scope, 'EncryptionScope') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('EncryptionScope', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + patch.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/encryptionScopes/{encryptionScopeName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + account_name, # type: str + encryption_scope_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.EncryptionScope" + """Returns the properties for the specified encryption scope. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param encryption_scope_name: The name of the encryption scope within the specified storage + account. Encryption scope names must be between 3 and 63 characters in length and use numbers, + lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and + followed by a letter or number. + :type encryption_scope_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EncryptionScope, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.EncryptionScope + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.EncryptionScope"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'encryptionScopeName': self._serialize.url("encryption_scope_name", encryption_scope_name, 'str', max_length=63, min_length=3), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('EncryptionScope', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/encryptionScopes/{encryptionScopeName}'} # type: ignore + + def list( + self, + resource_group_name, # type: str + account_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.EncryptionScopeListResult"] + """Lists all the encryption scopes available under the specified storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either EncryptionScopeListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.v2021_06_01.models.EncryptionScopeListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.EncryptionScopeListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('EncryptionScopeListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/encryptionScopes'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_file_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_file_services_operations.py new file mode 100644 index 000000000000..0ebb435eb31f --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_file_services_operations.py @@ -0,0 +1,249 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class FileServicesOperations(object): + """FileServicesOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + resource_group_name, # type: str + account_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.FileServiceItems" + """List all file services in storage accounts. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. 
+ :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FileServiceItems, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.FileServiceItems + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.FileServiceItems"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('FileServiceItems', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices'} # type: ignore + + def set_service_properties( + self, + resource_group_name, # type: str + account_name, # type: str + parameters, # type: "_models.FileServiceProperties" + **kwargs # type: Any + ): + # type: (...) -> "_models.FileServiceProperties" + """Sets the properties of file services in storage accounts, including CORS (Cross-Origin Resource + Sharing) rules. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param parameters: The properties of file services in storage accounts, including CORS + (Cross-Origin Resource Sharing) rules. 
+ :type parameters: ~azure.mgmt.storage.v2021_06_01.models.FileServiceProperties + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FileServiceProperties, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.FileServiceProperties + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.FileServiceProperties"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + file_services_name = "default" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.set_service_properties.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'FileServicesName': self._serialize.url("file_services_name", file_services_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'FileServiceProperties') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('FileServiceProperties', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + set_service_properties.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/{FileServicesName}'} # type: ignore + + def get_service_properties( + self, + resource_group_name, # type: str + account_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.FileServiceProperties" + """Gets the properties of file services in storage accounts, including CORS (Cross-Origin Resource + Sharing) rules. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FileServiceProperties, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.FileServiceProperties + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.FileServiceProperties"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + file_services_name = "default" + accept = "application/json" + + # Construct URL + url = self.get_service_properties.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'FileServicesName': self._serialize.url("file_services_name", file_services_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('FileServiceProperties', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_service_properties.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/{FileServicesName}'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_file_shares_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_file_shares_operations.py new file mode 100644 index 000000000000..ee6d098cdab5 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_file_shares_operations.py @@ -0,0 +1,639 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class FileSharesOperations(object): + """FileSharesOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + resource_group_name, # type: str + account_name, # type: str + maxpagesize=None, # type: Optional[str] + filter=None, # type: Optional[str] + expand=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.FileShareItems"] + """Lists all shares. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param maxpagesize: Optional. Specified maximum number of shares that can be included in the + list. + :type maxpagesize: str + :param filter: Optional. When specified, only share names starting with the filter will be + listed. + :type filter: str + :param expand: Optional, used to expand the properties within share's properties. Valid values + are: deleted, snapshots. Should be passed as a string with delimiter ','. 
+ :type expand: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either FileShareItems or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.v2021_06_01.models.FileShareItems] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.FileShareItems"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'str') + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + if expand is not None: + query_parameters['$expand'] = self._serialize.query("expand", expand, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('FileShareItems', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares'} # type: ignore + + def create( + self, + resource_group_name, # type: str + account_name, # type: str + share_name, # type: str + file_share, # type: "_models.FileShare" + expand=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "_models.FileShare" + """Creates a new share under the specified account as described by request body. 
The share + resource includes metadata and properties for that share. It does not include a list of the + files contained by the share. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. + :type share_name: str + :param file_share: Properties of the file share to create. + :type file_share: ~azure.mgmt.storage.v2021_06_01.models.FileShare + :param expand: Optional, used to expand the properties within share's properties. Valid values + are: snapshots. Should be passed as a string with delimiter ','. + :type expand: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FileShare, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.FileShare + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.FileShare"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'shareName': self._serialize.url("share_name", share_name, 'str', max_length=63, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if expand is not None: + query_parameters['$expand'] = self._serialize.query("expand", expand, 'str') + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(file_share, 'FileShare') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + 
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('FileShare', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('FileShare', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares/{shareName}'} # type: ignore + + def update( + self, + resource_group_name, # type: str + account_name, # type: str + share_name, # type: str + file_share, # type: "_models.FileShare" + **kwargs # type: Any + ): + # type: (...) -> "_models.FileShare" + """Updates share properties as specified in request body. Properties not mentioned in the request + will not be changed. Update fails if the specified share does not already exist. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. + :type share_name: str + :param file_share: Properties to update for the file share. + :type file_share: ~azure.mgmt.storage.v2021_06_01.models.FileShare + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FileShare, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.FileShare + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.FileShare"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'shareName': self._serialize.url("share_name", share_name, 'str', max_length=63, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: 
Dict[str, Any] + body_content = self._serialize.body(file_share, 'FileShare') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('FileShare', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares/{shareName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + account_name, # type: str + share_name, # type: str + expand=None, # type: Optional[str] + x_ms_snapshot=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "_models.FileShare" + """Gets properties of a specified share. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. + :type share_name: str + :param expand: Optional, used to expand the properties within share's properties. Valid values + are: stats. Should be passed as a string with delimiter ','. + :type expand: str + :param x_ms_snapshot: Optional, used to retrieve properties of a snapshot. 
+ :type x_ms_snapshot: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FileShare, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.FileShare + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.FileShare"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'shareName': self._serialize.url("share_name", share_name, 'str', max_length=63, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + if expand is not None: + query_parameters['$expand'] = self._serialize.query("expand", expand, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if x_ms_snapshot is not None: + header_parameters['x-ms-snapshot'] = self._serialize.header("x_ms_snapshot", x_ms_snapshot, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('FileShare', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares/{shareName}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + account_name, # type: str + share_name, # type: str + x_ms_snapshot=None, # type: Optional[str] + include=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes specified share under its account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. 
Every dash (-) character must be immediately preceded and followed by a letter + or number. + :type share_name: str + :param x_ms_snapshot: Optional, used to delete a snapshot. + :type x_ms_snapshot: str + :param include: Optional. Valid values are: snapshots, leased-snapshots, none. The default + value is snapshots. For 'snapshots', the file share is deleted including all of its file share + snapshots. If the file share contains leased-snapshots, the deletion fails. For + 'leased-snapshots', the file share is deleted including all of its file share snapshots + (leased/unleased). For 'none', the file share is deleted if it has no share snapshots. If the + file share contains any snapshots (leased or unleased), the deletion fails. + :type include: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'shareName': self._serialize.url("share_name", share_name, 'str', max_length=63, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + if include is not None: + query_parameters['$include'] = self._serialize.query("include", include, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if x_ms_snapshot is not None: + header_parameters['x-ms-snapshot'] = self._serialize.header("x_ms_snapshot", x_ms_snapshot, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares/{shareName}'} # type: ignore + + def restore( + self, + resource_group_name, # type: str + account_name, # type: str + share_name, # type: str + deleted_share, # type: "_models.DeletedShare" + **kwargs # type: Any + ): + # type: (...) -> None + """Restores a file share within the valid retention period if share soft delete is enabled. 
+ + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. + :type share_name: str + :param deleted_share: + :type deleted_share: ~azure.mgmt.storage.v2021_06_01.models.DeletedShare + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.restore.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'shareName': self._serialize.url("share_name", share_name, 'str', max_length=63, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(deleted_share, 'DeletedShare') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + restore.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares/{shareName}/restore'} # type: ignore + + def lease( + self, + resource_group_name, # type: str + account_name, # type: str + share_name, # type: str + x_ms_snapshot=None, # type: Optional[str] + 
parameters=None, # type: Optional["_models.LeaseShareRequest"] + **kwargs # type: Any + ): + # type: (...) -> "_models.LeaseShareResponse" + """The Lease Share operation establishes and manages a lock on a share for delete operations. The + lock duration can be 15 to 60 seconds, or can be infinite. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param share_name: The name of the file share within the specified storage account. File share + names must be between 3 and 63 characters in length and use numbers, lower-case letters and + dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter + or number. + :type share_name: str + :param x_ms_snapshot: Optional. Specify the snapshot time to lease a snapshot. + :type x_ms_snapshot: str + :param parameters: Lease Share request body. + :type parameters: ~azure.mgmt.storage.v2021_06_01.models.LeaseShareRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LeaseShareResponse, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.LeaseShareResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.LeaseShareResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.lease.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'shareName': self._serialize.url("share_name", share_name, 'str', max_length=63, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if x_ms_snapshot is not None: + header_parameters['x-ms-snapshot'] = self._serialize.header("x_ms_snapshot", x_ms_snapshot, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if parameters is not None: + body_content = self._serialize.body(parameters, 'LeaseShareRequest') + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if 
response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('LeaseShareResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + lease.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares/{shareName}/lease'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_management_policies_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_management_policies_operations.py new file mode 100644 index 000000000000..d6a65327a004 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_management_policies_operations.py @@ -0,0 +1,249 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class ManagementPoliciesOperations(object): + """ManagementPoliciesOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def get( + self, + resource_group_name, # type: str + account_name, # type: str + management_policy_name, # type: Union[str, "_models.ManagementPolicyName"] + **kwargs # type: Any + ): + # type: (...) -> "_models.ManagementPolicy" + """Gets the managementpolicy associated with the specified storage account. 
+ + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param management_policy_name: The name of the Storage Account Management Policy. It should + always be 'default'. + :type management_policy_name: str or ~azure.mgmt.storage.v2021_06_01.models.ManagementPolicyName + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ManagementPolicy, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.ManagementPolicy + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagementPolicy"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'managementPolicyName': self._serialize.url("management_policy_name", management_policy_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ManagementPolicy', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/managementPolicies/{managementPolicyName}'} # type: ignore + + def create_or_update( + self, + resource_group_name, # type: str + account_name, # type: str + management_policy_name, # type: Union[str, "_models.ManagementPolicyName"] + properties, # type: "_models.ManagementPolicy" + **kwargs # type: Any + ): + # type: (...) -> "_models.ManagementPolicy" + """Sets the managementpolicy to the specified storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param management_policy_name: The name of the Storage Account Management Policy. It should + always be 'default'. + :type management_policy_name: str or ~azure.mgmt.storage.v2021_06_01.models.ManagementPolicyName + :param properties: The ManagementPolicy set to a storage account. + :type properties: ~azure.mgmt.storage.v2021_06_01.models.ManagementPolicy + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ManagementPolicy, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.ManagementPolicy + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagementPolicy"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'managementPolicyName': self._serialize.url("management_policy_name", management_policy_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(properties, 'ManagementPolicy') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ManagementPolicy', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/managementPolicies/{managementPolicyName}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + account_name, # type: str + management_policy_name, # type: Union[str, "_models.ManagementPolicyName"] + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes the managementpolicy associated with the specified storage account. 
+ + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param management_policy_name: The name of the Storage Account Management Policy. It should + always be 'default'. + :type management_policy_name: str or ~azure.mgmt.storage.v2021_06_01.models.ManagementPolicyName + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'managementPolicyName': self._serialize.url("management_policy_name", management_policy_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/managementPolicies/{managementPolicyName}'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_object_replication_policies_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_object_replication_policies_operations.py new file mode 100644 index 000000000000..8483558c732b --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_object_replication_policies_operations.py @@ -0,0 +1,341 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class ObjectReplicationPoliciesOperations(object): + """ObjectReplicationPoliciesOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + resource_group_name, # type: str + account_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.ObjectReplicationPolicies"] + """List the object replication policies associated with the storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. 
+ :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ObjectReplicationPolicies or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.v2021_06_01.models.ObjectReplicationPolicies] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ObjectReplicationPolicies"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('ObjectReplicationPolicies', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/objectReplicationPolicies'} # type: ignore + + def get( + self, + resource_group_name, # type: str + account_name, # type: str + object_replication_policy_id, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.ObjectReplicationPolicy" + """Get the object replication policy of the storage account by policy ID. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. 
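The `list` operation just defined pages through `ObjectReplicationPolicies` results via `ItemPaged`. A brief sketch of consuming it, under the same placeholder client assumptions; the `policy_id` attribute is taken from the `ObjectReplicationPolicy` model and is an assumption here:

```py
from azure.identity import DefaultAzureCredential
from azure.mgmt.storage import StorageManagementClient

client = StorageManagementClient(DefaultAzureCredential(), "<subscription-id>")

# ItemPaged lazily fetches pages; each element is an ObjectReplicationPolicy.
for policy in client.object_replication_policies.list("<resource-group>", "<storage-account>"):
    print(policy.policy_id)  # policy_id assumed from the ObjectReplicationPolicy model
```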
+ :type account_name: str + :param object_replication_policy_id: For the destination account, provide the value 'default'. + Configure the policy on the destination account first. For the source account, provide the + value of the policy ID that is returned when you download the policy that was defined on the + destination account. The policy is downloaded as a JSON file. + :type object_replication_policy_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ObjectReplicationPolicy, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.ObjectReplicationPolicy + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ObjectReplicationPolicy"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'objectReplicationPolicyId': self._serialize.url("object_replication_policy_id", object_replication_policy_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ObjectReplicationPolicy', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/objectReplicationPolicies/{objectReplicationPolicyId}'} # type: ignore + + def create_or_update( + self, + resource_group_name, # type: str + account_name, # type: str + object_replication_policy_id, # type: str + properties, # type: "_models.ObjectReplicationPolicy" + **kwargs # type: Any + ): + # type: (...) -> "_models.ObjectReplicationPolicy" + """Create or update the object replication policy of the storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param object_replication_policy_id: For the destination account, provide the value 'default'. + Configure the policy on the destination account first. For the source account, provide the + value of the policy ID that is returned when you download the policy that was defined on the + destination account. The policy is downloaded as a JSON file. + :type object_replication_policy_id: str + :param properties: The object replication policy set to a storage account. A unique policy ID + will be created if absent. + :type properties: ~azure.mgmt.storage.v2021_06_01.models.ObjectReplicationPolicy + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ObjectReplicationPolicy, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.ObjectReplicationPolicy + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ObjectReplicationPolicy"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'objectReplicationPolicyId': self._serialize.url("object_replication_policy_id", object_replication_policy_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(properties, 'ObjectReplicationPolicy') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ObjectReplicationPolicy', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/objectReplicationPolicies/{objectReplicationPolicyId}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + account_name, # type: str + object_replication_policy_id, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes the object replication policy associated with the specified storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param object_replication_policy_id: For the destination account, provide the value 'default'. + Configure the policy on the destination account first. For the source account, provide the + value of the policy ID that is returned when you download the policy that was defined on the + destination account. The policy is downloaded as a JSON file. + :type object_replication_policy_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'objectReplicationPolicyId': self._serialize.url("object_replication_policy_id", object_replication_policy_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/objectReplicationPolicies/{objectReplicationPolicyId}'} # type: ignore diff --git 
a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_operations.py new file mode 100644 index 000000000000..87ce91d6dabf --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_operations.py @@ -0,0 +1,109 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class Operations(object): + """Operations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.OperationListResult"] + """Lists all of the available Storage Rest API operations. 
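`Operations.list`, whose body follows, is the one operation here that takes no account-scoped parameters: it enumerates the Microsoft.Storage provider's REST operations. A minimal consumption sketch with the usual placeholder client; the `name` attribute is assumed from the `Operation` model:

```py
from azure.identity import DefaultAzureCredential
from azure.mgmt.storage import StorageManagementClient

client = StorageManagementClient(DefaultAzureCredential(), "<subscription-id>")

# No resource group or account is needed for the provider-level operation listing.
for operation in client.operations.list():
    print(operation.name)  # e.g. "Microsoft.Storage/storageAccounts/read" (illustrative)
```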
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either OperationListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.v2021_06_01.models.OperationListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('OperationListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/providers/Microsoft.Storage/operations'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_private_endpoint_connections_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_private_endpoint_connections_operations.py new file mode 100644 index 000000000000..5ace766fd21c --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_private_endpoint_connections_operations.py @@ -0,0 +1,333 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. 
import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class PrivateEndpointConnectionsOperations(object): + """PrivateEndpointConnectionsOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + resource_group_name, # type: str + account_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.PrivateEndpointConnectionListResult"] + """List all the private endpoint connections associated with the storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either PrivateEndpointConnectionListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.v2021_06_01.models.PrivateEndpointConnectionListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnectionListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, 
query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('PrivateEndpointConnectionListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/privateEndpointConnections'} # type: ignore + + def get( + self, + resource_group_name, # type: str + account_name, # type: str + private_endpoint_connection_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.PrivateEndpointConnection" + """Gets the specified private endpoint connection associated with the storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. 
+ :type private_endpoint_connection_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnection, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.PrivateEndpointConnection + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + def put( + self, + resource_group_name, # type: str + account_name, # type: str + private_endpoint_connection_name, # type: str + properties, # type: "_models.PrivateEndpointConnection" + **kwargs # type: Any + ): + # type: (...) -> "_models.PrivateEndpointConnection" + """Update the state of specified private endpoint connection associated with the storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. + :type private_endpoint_connection_name: str + :param properties: The private endpoint connection properties. 
+ :type properties: ~azure.mgmt.storage.v2021_06_01.models.PrivateEndpointConnection + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnection, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.PrivateEndpointConnection + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.put.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(properties, 'PrivateEndpointConnection') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + put.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + account_name, # type: str + private_endpoint_connection_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes the specified private endpoint connection associated with the storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. + :type private_endpoint_connection_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_private_link_resources_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_private_link_resources_operations.py new file mode 100644 index 000000000000..e168aa3b06a2 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_private_link_resources_operations.py @@ -0,0 +1,107 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
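The `PrivateEndpointConnectionsOperations` group above covers list, get, put, and delete for a single account's connections. A usage sketch under the same placeholder assumptions; `<connection-name>` is hypothetical, and connections are identified here only by their `name`:

```py
from azure.identity import DefaultAzureCredential
from azure.mgmt.storage import StorageManagementClient

client = StorageManagementClient(DefaultAzureCredential(), "<subscription-id>")
rg, account = "<resource-group>", "<storage-account>"

# Enumerate the connections attached to the account, then remove one by name.
for connection in client.private_endpoint_connections.list(rg, account):
    print(connection.name)

client.private_endpoint_connections.delete(rg, account, "<connection-name>")
```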
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class PrivateLinkResourcesOperations(object): + """PrivateLinkResourcesOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_storage_account( + self, + resource_group_name, # type: str + account_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.PrivateLinkResourceListResult" + """Gets the private link resources that need to be created for a storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. 
+ :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateLinkResourceListResult, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.PrivateLinkResourceListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateLinkResourceListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.list_by_storage_account.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PrivateLinkResourceListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_by_storage_account.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/privateLinkResources'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_queue_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_queue_operations.py new file mode 100644 index 000000000000..3af976e8c8a6 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_queue_operations.py @@ -0,0 +1,430 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
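Unlike the pageable listings, `list_by_storage_account` above returns a single `PrivateLinkResourceListResult` whose `value` holds the private link resources (group IDs such as `blob` or `queue`) needed when wiring up a private endpoint. A sketch under the same placeholder assumptions; `group_id` is taken from the `PrivateLinkResource` model and is an assumption here:

```py
from azure.identity import DefaultAzureCredential
from azure.mgmt.storage import StorageManagementClient

client = StorageManagementClient(DefaultAzureCredential(), "<subscription-id>")

# A plain (non-paged) result object; its .value list carries the resources.
result = client.private_link_resources.list_by_storage_account("<resource-group>", "<storage-account>")
for resource in result.value:
    print(resource.name, resource.group_id)  # group_id assumed from the PrivateLinkResource model
```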
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class QueueOperations(object): + """QueueOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def create( + self, + resource_group_name, # type: str + account_name, # type: str + queue_name, # type: str + queue, # type: "_models.StorageQueue" + **kwargs # type: Any + ): + # type: (...) -> "_models.StorageQueue" + """Creates a new queue with the specified queue name, under the specified account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param queue_name: A queue name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, + it should begin and end with an alphanumeric character and it cannot have two consecutive + dash(-) characters. + :type queue_name: str + :param queue: Queue properties and metadata to be created with. 
+ :type queue: ~azure.mgmt.storage.v2021_06_01.models.StorageQueue + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StorageQueue, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.StorageQueue + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageQueue"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'queueName': self._serialize.url("queue_name", queue_name, 'str', max_length=63, min_length=3, pattern=r'^[a-z0-9]([a-z0-9]|(-(?!-))){1,61}[a-z0-9]$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(queue, 'StorageQueue') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('StorageQueue', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/queueServices/default/queues/{queueName}'} # type: ignore + + def update( + self, + resource_group_name, # type: str + account_name, # type: str + queue_name, # type: str + queue, # type: "_models.StorageQueue" + **kwargs # type: Any + ): + # type: (...) -> "_models.StorageQueue" + """Creates a new queue with the specified queue name, under the specified account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. 
+ :type account_name: str + :param queue_name: A queue name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, + it should begin and end with an alphanumeric character and it cannot have two consecutive + dash(-) characters. + :type queue_name: str + :param queue: Queue properties and metadata to be created with. + :type queue: ~azure.mgmt.storage.v2021_06_01.models.StorageQueue + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StorageQueue, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.StorageQueue + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageQueue"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'queueName': self._serialize.url("queue_name", queue_name, 'str', max_length=63, min_length=3, pattern=r'^[a-z0-9]([a-z0-9]|(-(?!-))){1,61}[a-z0-9]$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(queue, 'StorageQueue') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('StorageQueue', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/queueServices/default/queues/{queueName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + account_name, # type: str + queue_name, # type: str + **kwargs # type: Any + ): + # type: (...) 
-> "_models.StorageQueue" + """Gets the queue with the specified queue name, under the specified account if it exists. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param queue_name: A queue name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, + it should begin and end with an alphanumeric character and it cannot have two consecutive + dash(-) characters. + :type queue_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StorageQueue, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.StorageQueue + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageQueue"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'queueName': self._serialize.url("queue_name", queue_name, 'str', max_length=63, min_length=3, pattern=r'^[a-z0-9]([a-z0-9]|(-(?!-))){1,61}[a-z0-9]$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('StorageQueue', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/queueServices/default/queues/{queueName}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + account_name, # type: str + queue_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes the queue with the specified queue name, under the specified account if it exists. 
+ + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param queue_name: A queue name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, + it should begin and end with an alphanumeric character and it cannot have two consecutive + dash(-) characters. + :type queue_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'queueName': self._serialize.url("queue_name", queue_name, 'str', max_length=63, min_length=3, pattern=r'^[a-z0-9]([a-z0-9]|(-(?!-))){1,61}[a-z0-9]$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/queueServices/default/queues/{queueName}'} # type: ignore + + def list( + self, + resource_group_name, # type: str + account_name, # type: str + maxpagesize=None, # type: Optional[str] + filter=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.ListQueueResource"] + """Gets a list of all the queues under the specified storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. 
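The queue operations above (create, update, get, delete) all target `.../queueServices/default/queues/{queueName}`. A create/get/delete round trip as a sketch; the queue name and metadata are illustrative, the operation group is assumed to be exposed as `client.queue` on the top-level client, and `StorageQueue` is imported from this API version's models:

```py
from azure.identity import DefaultAzureCredential
from azure.mgmt.storage import StorageManagementClient
from azure.mgmt.storage.v2021_06_01.models import StorageQueue

client = StorageManagementClient(DefaultAzureCredential(), "<subscription-id>")
rg, account = "<resource-group>", "<storage-account>"

# Queue names: 3-63 chars, lowercase alphanumerics and single dashes only.
created = client.queue.create(rg, account, "sample-queue", StorageQueue(metadata={"purpose": "demo"}))
fetched = client.queue.get(rg, account, "sample-queue")
print(fetched.name, fetched.metadata)

# delete accepts only a 204 response; anything else raises HttpResponseError.
client.queue.delete(rg, account, "sample-queue")
```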
+ :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param maxpagesize: Optional, a maximum number of queues that should be included in a list + queue response. + :type maxpagesize: str + :param filter: Optional, When specified, only the queues with a name starting with the given + filter will be listed. + :type filter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ListQueueResource or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.v2021_06_01.models.ListQueueResource] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListQueueResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + if maxpagesize is not None: + query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'str') + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('ListQueueResource', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/queueServices/default/queues'} # type: ignore
diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_queue_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_queue_services_operations.py
new file mode 100644
index 000000000000..779f6d26cbaf
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_queue_services_operations.py
@@ -0,0 +1,249 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models as _models
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+
+    T = TypeVar('T')
+    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class QueueServicesOperations(object):
+    """QueueServicesOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.storage.v2021_06_01.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer):
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list(
+        self,
+        resource_group_name, # type: str
+        account_name, # type: str
+        **kwargs # type: Any
+    ):
+        # type: (...) -> "_models.ListQueueServices"
+        """List all queue services for the storage account.
+
+        :param resource_group_name: The name of the resource group within the user's subscription. The
+         name is case insensitive.
+        :type resource_group_name: str
+        :param account_name: The name of the storage account within the specified resource group.
+         Storage account names must be between 3 and 24 characters in length and use numbers and
+         lower-case letters only.
+ :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ListQueueServices, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.ListQueueServices + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListQueueServices"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ListQueueServices', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/queueServices'} # type: ignore + + def set_service_properties( + self, + resource_group_name, # type: str + account_name, # type: str + parameters, # type: "_models.QueueServiceProperties" + **kwargs # type: Any + ): + # type: (...) -> "_models.QueueServiceProperties" + """Sets the properties of a storage account’s Queue service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param parameters: The properties of a storage account’s Queue service, only properties for + Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. 
+ :type parameters: ~azure.mgmt.storage.v2021_06_01.models.QueueServiceProperties + :keyword callable cls: A custom type or function that will be passed the direct response + :return: QueueServiceProperties, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.QueueServiceProperties + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.QueueServiceProperties"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + queue_service_name = "default" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.set_service_properties.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'queueServiceName': self._serialize.url("queue_service_name", queue_service_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'QueueServiceProperties') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('QueueServiceProperties', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + set_service_properties.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/queueServices/{queueServiceName}'} # type: ignore + + def get_service_properties( + self, + resource_group_name, # type: str + account_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.QueueServiceProperties" + """Gets the properties of a storage account’s Queue service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+         Storage account names must be between 3 and 24 characters in length and use numbers and
+         lower-case letters only.
+        :type account_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: QueueServiceProperties, or the result of cls(response)
+        :rtype: ~azure.mgmt.storage.v2021_06_01.models.QueueServiceProperties
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["_models.QueueServiceProperties"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-06-01"
+        queue_service_name = "default"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get_service_properties.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
+            'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+            'queueServiceName': self._serialize.url("queue_service_name", queue_service_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('QueueServiceProperties', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get_service_properties.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/queueServices/{queueServiceName}'} # type: ignore
diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_skus_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_skus_operations.py
new file mode 100644
index 000000000000..eebde5adeaa6
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_skus_operations.py
@@ -0,0 +1,113 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class SkusOperations(object): + """SkusOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.StorageSkuListResult"] + """Lists the available SKUs supported by Microsoft.Storage for given subscription. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either StorageSkuListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.v2021_06_01.models.StorageSkuListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageSkuListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('StorageSkuListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, iter(list_of_elem) + + def get_next(next_link=None): + 
request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Storage/skus'} # type: ignore
diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_storage_accounts_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_storage_accounts_operations.py
new file mode 100644
index 000000000000..a8a7e3f6f67b
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_storage_accounts_operations.py
@@ -0,0 +1,1408 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
+
+from .. import models as _models
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+    T = TypeVar('T')
+    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class StorageAccountsOperations(object):
+    """StorageAccountsOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.storage.v2021_06_01.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer):
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def check_name_availability(
+        self,
+        account_name, # type: "_models.StorageAccountCheckNameAvailabilityParameters"
+        **kwargs # type: Any
+    ):
+        # type: (...) -> "_models.CheckNameAvailabilityResult"
+        """Checks that the storage account name is valid and is not already in use.
+
+        :param account_name: The name of the storage account within the specified resource group.
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: ~azure.mgmt.storage.v2021_06_01.models.StorageAccountCheckNameAvailabilityParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CheckNameAvailabilityResult, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.CheckNameAvailabilityResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.CheckNameAvailabilityResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.check_name_availability.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(account_name, 'StorageAccountCheckNameAvailabilityParameters') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('CheckNameAvailabilityResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + check_name_availability.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Storage/checkNameAvailability'} # type: ignore + + def _create_initial( + self, + resource_group_name, # type: str + account_name, # type: str + parameters, # type: "_models.StorageAccountCreateParameters" + **kwargs # type: Any + ): + # type: (...) 
-> Optional["_models.StorageAccount"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.StorageAccount"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._create_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'StorageAccountCreateParameters') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('StorageAccount', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}'} # type: ignore + + def begin_create( + self, + resource_group_name, # type: str + account_name, # type: str + parameters, # type: "_models.StorageAccountCreateParameters" + **kwargs # type: Any + ): + # type: (...) -> LROPoller["_models.StorageAccount"] + """Asynchronously creates a new storage account with the specified parameters. If an account is + already created and a subsequent create request is issued with different properties, the + account properties will be updated. If an account is already created and a subsequent create or + update request is issued with the exact same set of properties, the request will succeed. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param parameters: The parameters to provide for the created account. 
+ :type parameters: ~azure.mgmt.storage.v2021_06_01.models.StorageAccountCreateParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns either StorageAccount or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.v2021_06_01.models.StorageAccount] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccount"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_initial( + resource_group_name=resource_group_name, + account_name=account_name, + parameters=parameters, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('StorageAccount', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + account_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes a storage account in Microsoft Azure. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. 
+ :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}'} # type: ignore + + def get_properties( + self, + resource_group_name, # type: str + account_name, # type: str + expand=None, # type: Optional[Union[str, "_models.StorageAccountExpand"]] + **kwargs # type: Any + ): + # type: (...) -> "_models.StorageAccount" + """Returns the properties for the specified storage account including but not limited to name, SKU + name, location, and account status. The ListKeys operation should be used to retrieve storage + keys. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param expand: May be used to expand the properties within account's properties. By default, + data is not included when fetching properties. Currently we only support geoReplicationStats + and blobRestoreStatus. 
+ :type expand: str or ~azure.mgmt.storage.v2021_06_01.models.StorageAccountExpand + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StorageAccount, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.StorageAccount + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccount"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.get_properties.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + if expand is not None: + query_parameters['$expand'] = self._serialize.query("expand", expand, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('StorageAccount', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_properties.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}'} # type: ignore + + def update( + self, + resource_group_name, # type: str + account_name, # type: str + parameters, # type: "_models.StorageAccountUpdateParameters" + **kwargs # type: Any + ): + # type: (...) -> "_models.StorageAccount" + """The update operation can be used to update the SKU, encryption, access tier, or tags for a + storage account. It can also be used to map the account to a custom domain. Only one custom + domain is supported per storage account; the replacement/change of custom domain is not + supported. In order to replace an old custom domain, the old value must be cleared/unregistered + before a new value can be set. The update of multiple properties is supported. This call does + not change the storage keys for the account. If you want to change the storage account keys, + use the regenerate keys operation. The location and name of the storage account cannot be + changed after creation. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param parameters: The parameters to provide for the updated account. + :type parameters: ~azure.mgmt.storage.v2021_06_01.models.StorageAccountUpdateParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StorageAccount, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.StorageAccount + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccount"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'StorageAccountUpdateParameters') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('StorageAccount', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}'} # type: ignore + + def list( + self, + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.StorageAccountListResult"] + """Lists all the storage accounts available under the subscription. Note that storage keys are not + returned; use the ListKeys operation for this. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either StorageAccountListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.v2021_06_01.models.StorageAccountListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccountListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('StorageAccountListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Storage/storageAccounts'} # type: ignore + + def list_by_resource_group( + self, + resource_group_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.StorageAccountListResult"] + """Lists all the storage accounts available under the given resource group. Note that storage keys + are not returned; use the ListKeys operation for this. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. 
+ :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either StorageAccountListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.v2021_06_01.models.StorageAccountListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccountListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_resource_group.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('StorageAccountListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts'} # type: ignore + + def list_keys( + self, + resource_group_name, # type: str + account_name, # type: str + expand="kerb", # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "_models.StorageAccountListKeysResult" + """Lists the access keys or Kerberos keys (if active directory enabled) for the specified storage + account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param expand: Specifies type of the key to be listed. Possible value is kerb. 
+ :type expand: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StorageAccountListKeysResult, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.StorageAccountListKeysResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccountListKeysResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.list_keys.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + if expand is not None: + query_parameters['$expand'] = self._serialize.query("expand", expand, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('StorageAccountListKeysResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/listKeys'} # type: ignore + + def regenerate_key( + self, + resource_group_name, # type: str + account_name, # type: str + regenerate_key, # type: "_models.StorageAccountRegenerateKeyParameters" + **kwargs # type: Any + ): + # type: (...) -> "_models.StorageAccountListKeysResult" + """Regenerates one of the access keys or Kerberos keys for the specified storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param regenerate_key: Specifies name of the key which should be regenerated -- key1, key2, + kerb1, kerb2. 
+ :type regenerate_key: ~azure.mgmt.storage.v2021_06_01.models.StorageAccountRegenerateKeyParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StorageAccountListKeysResult, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.StorageAccountListKeysResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccountListKeysResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.regenerate_key.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(regenerate_key, 'StorageAccountRegenerateKeyParameters') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('StorageAccountListKeysResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + regenerate_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/regenerateKey'} # type: ignore + + def list_account_sas( + self, + resource_group_name, # type: str + account_name, # type: str + parameters, # type: "_models.AccountSasParameters" + **kwargs # type: Any + ): + # type: (...) -> "_models.ListAccountSasResponse" + """List SAS credentials of a storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param parameters: The parameters to provide to list SAS credentials for the storage account. 
+ :type parameters: ~azure.mgmt.storage.v2021_06_01.models.AccountSasParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ListAccountSasResponse, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.ListAccountSasResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListAccountSasResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.list_account_sas.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'AccountSasParameters') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ListAccountSasResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_account_sas.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/ListAccountSas'} # type: ignore + + def list_service_sas( + self, + resource_group_name, # type: str + account_name, # type: str + parameters, # type: "_models.ServiceSasParameters" + **kwargs # type: Any + ): + # type: (...) -> "_models.ListServiceSasResponse" + """List service SAS credentials of a specific resource. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param parameters: The parameters to provide to list service SAS credentials. 
+ :type parameters: ~azure.mgmt.storage.v2021_06_01.models.ServiceSasParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ListServiceSasResponse, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.ListServiceSasResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListServiceSasResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.list_service_sas.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'ServiceSasParameters') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ListServiceSasResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_service_sas.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/ListServiceSas'} # type: ignore + + def _failover_initial( + self, + resource_group_name, # type: str + account_name, # type: str + **kwargs # type: Any + ): + # type: (...) 
-> None + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + + # Construct URL + url = self._failover_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _failover_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/failover'} # type: ignore + + def begin_failover( + self, + resource_group_name, # type: str + account_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Failover request can be triggered for a storage account in case of availability issues. The + failover occurs from the storage account's primary cluster to secondary cluster for RA-GRS + accounts. The secondary cluster will become primary after failover. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
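# ----------------------------------------------------------------------------------
# Illustrative usage sketch (editor's addition, not part of the generated module).
# Assumes `client` is the StorageManagementClient constructed in the earlier sketch and
# that the placeholder account is RA-GRS, as this operation's description requires.
failover_poller = client.storage_accounts.begin_failover(
    "<resource-group>", "<ra-grs-account-name>"
)
failover_poller.result()  # blocks until the long-running failover completes; returns None
# ----------------------------------------------------------------------------------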
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._failover_initial( + resource_group_name=resource_group_name, + account_name=account_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + + if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_failover.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/failover'} # type: ignore + + def _hierarchical_namespace_migration_initial( + self, + resource_group_name, # type: str + account_name, # type: str + request_type, # type: str + **kwargs # type: Any + ): + # type: (...) 
-> None + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self._hierarchical_namespace_migration_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + query_parameters['requestType'] = self._serialize.query("request_type", request_type, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _hierarchical_namespace_migration_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/hnsonmigration'} # type: ignore + + def begin_hierarchical_namespace_migration( + self, + resource_group_name, # type: str + account_name, # type: str + request_type, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Live Migration of storage account to enable Hns. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param request_type: Required. Hierarchical namespace migration type can either be a + hierarchical namespace validation request 'HnsOnValidationRequest' or a hydration request + 'HnsOnHydrationRequest'. The validation request will validate the migration whereas the + hydration request will migrate the account. + :type request_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. 
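# ----------------------------------------------------------------------------------
# Illustrative usage sketch (editor's addition, not part of the generated module).
# Assumes `client` from the earlier sketch; the request_type values are the two
# documented in this docstring: validate the migration first, then hydrate to run it.
client.storage_accounts.begin_hierarchical_namespace_migration(
    "<resource-group>", "<account-name>", "HnsOnValidationRequest"
).result()
client.storage_accounts.begin_hierarchical_namespace_migration(
    "<resource-group>", "<account-name>", "HnsOnHydrationRequest"
).result()
# An in-flight migration can be cancelled with the companion operation:
# client.storage_accounts.begin_abort_hierarchical_namespace_migration(...).result()
# ----------------------------------------------------------------------------------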
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._hierarchical_namespace_migration_initial( + resource_group_name=resource_group_name, + account_name=account_name, + request_type=request_type, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + + if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_hierarchical_namespace_migration.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/hnsonmigration'} # type: ignore + + def _abort_hierarchical_namespace_migration_initial( + self, + resource_group_name, # type: str + account_name, # type: str + **kwargs # type: Any + ): + # type: (...) 
-> None + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self._abort_hierarchical_namespace_migration_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _abort_hierarchical_namespace_migration_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/aborthnsonmigration'} # type: ignore + + def begin_abort_hierarchical_namespace_migration( + self, + resource_group_name, # type: str + account_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Abort live Migration of storage account to enable Hns. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._abort_hierarchical_namespace_migration_initial( + resource_group_name=resource_group_name, + account_name=account_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + + if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_abort_hierarchical_namespace_migration.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/aborthnsonmigration'} # type: ignore + + def _restore_blob_ranges_initial( + self, + resource_group_name, # type: str + account_name, # type: str + parameters, # type: "_models.BlobRestoreParameters" + **kwargs # type: Any + ): + # type: (...) 
-> "_models.BlobRestoreStatus" + cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobRestoreStatus"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._restore_blob_ranges_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'BlobRestoreParameters') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('BlobRestoreStatus', pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize('BlobRestoreStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _restore_blob_ranges_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/restoreBlobRanges'} # type: ignore + + def begin_restore_blob_ranges( + self, + resource_group_name, # type: str + account_name, # type: str + parameters, # type: "_models.BlobRestoreParameters" + **kwargs # type: Any + ): + # type: (...) -> LROPoller["_models.BlobRestoreStatus"] + """Restore blobs in the specified blob ranges. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param parameters: The parameters to provide for restore blob ranges. + :type parameters: ~azure.mgmt.storage.v2021_06_01.models.BlobRestoreParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
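# ----------------------------------------------------------------------------------
# Illustrative usage sketch (editor's addition, not part of the generated module).
# Assumes `client` from the earlier sketch and an account with point-in-time restore
# enabled; the container prefix and one-hour restore point are placeholders, and the
# empty end_range is assumed to mean "restore to the end of the range".
from datetime import datetime, timedelta

from azure.mgmt.storage.v2021_06_01.models import BlobRestoreParameters, BlobRestoreRange

restore_poller = client.storage_accounts.begin_restore_blob_ranges(
    "<resource-group>", "<account-name>",
    BlobRestoreParameters(
        time_to_restore=datetime.utcnow() - timedelta(hours=1),
        blob_ranges=[BlobRestoreRange(start_range="container1/", end_range="")],
    ),
)
restore_status = restore_poller.result()  # a BlobRestoreStatus once the LRO finishes
print(restore_status.status, restore_status.restore_id)
# ----------------------------------------------------------------------------------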
+ :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns either BlobRestoreStatus or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.v2021_06_01.models.BlobRestoreStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobRestoreStatus"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._restore_blob_ranges_initial( + resource_group_name=resource_group_name, + account_name=account_name, + parameters=parameters, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('BlobRestoreStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + + if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_restore_blob_ranges.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/restoreBlobRanges'} # type: ignore + + def revoke_user_delegation_keys( + self, + resource_group_name, # type: str + account_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Revoke user delegation keys. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. 
+ :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + + # Construct URL + url = self.revoke_user_delegation_keys.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + revoke_user_delegation_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/revokeUserDelegationKeys'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_table_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_table_operations.py new file mode 100644 index 000000000000..4ecdea47a615 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_table_operations.py @@ -0,0 +1,398 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. 
import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class TableOperations(object): + """TableOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def create( + self, + resource_group_name, # type: str + account_name, # type: str + table_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.Table" + """Creates a new table with the specified table name, under the specified account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param table_name: A table name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin + with a numeric character. 
+ :type table_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Table, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.Table + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.Table"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.create.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'tableName': self._serialize.url("table_name", table_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z][A-Za-z0-9]{2,62}$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Table', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices/default/tables/{tableName}'} # type: ignore + + def update( + self, + resource_group_name, # type: str + account_name, # type: str + table_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.Table" + """Creates a new table with the specified table name, under the specified account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param table_name: A table name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin + with a numeric character. 
+ :type table_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Table, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.Table + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.Table"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'tableName': self._serialize.url("table_name", table_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z][A-Za-z0-9]{2,62}$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.patch(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Table', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices/default/tables/{tableName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + account_name, # type: str + table_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.Table" + """Gets the table with the specified table name, under the specified account if it exists. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param table_name: A table name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin + with a numeric character. 
+ :type table_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Table, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.Table + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.Table"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'tableName': self._serialize.url("table_name", table_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z][A-Za-z0-9]{2,62}$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Table', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices/default/tables/{tableName}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + account_name, # type: str + table_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes the table with the specified table name, under the specified account if it exists. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param table_name: A table name must be unique within a storage account and must be between 3 + and 63 characters.The name must comprise of only alphanumeric characters and it cannot begin + with a numeric character. 
+ :type table_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'tableName': self._serialize.url("table_name", table_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z][A-Za-z0-9]{2,62}$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices/default/tables/{tableName}'} # type: ignore + + def list( + self, + resource_group_name, # type: str + account_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.ListTableResource"] + """Gets a list of all the tables under the specified storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. 
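# ----------------------------------------------------------------------------------
# Illustrative usage sketch (editor's addition, not part of the generated module).
# Assumes `client` from the earlier sketch and that this operation group is attached to
# the client as `client.table`; "mytable" is a placeholder that satisfies the naming
# rules described in these docstrings.
created = client.table.create("<resource-group>", "<account-name>", "mytable")
print(created.table_name)
for table in client.table.list("<resource-group>", "<account-name>"):
    print(table.table_name)
client.table.delete("<resource-group>", "<account-name>", "mytable")
# ----------------------------------------------------------------------------------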
+ :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ListTableResource or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.v2021_06_01.models.ListTableResource] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListTableResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('ListTableResource', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices/default/tables'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_table_services_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_table_services_operations.py new file mode 100644 index 000000000000..fd02c93d04b8 --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_table_services_operations.py @@ -0,0 +1,249 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class TableServicesOperations(object): + """TableServicesOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.storage.v2021_06_01.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + resource_group_name, # type: str + account_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.ListTableServices" + """List all table services for the storage account. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. 
+ :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ListTableServices, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.ListTableServices + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListTableServices"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + accept = "application/json" + + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ListTableServices', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices'} # type: ignore + + def set_service_properties( + self, + resource_group_name, # type: str + account_name, # type: str + parameters, # type: "_models.TableServiceProperties" + **kwargs # type: Any + ): + # type: (...) -> "_models.TableServiceProperties" + """Sets the properties of a storage account’s Table service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. + Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :param parameters: The properties of a storage account’s Table service, only properties for + Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. 
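# ----------------------------------------------------------------------------------
# Illustrative usage sketch (editor's addition, not part of the generated module).
# Assumes `client` from the earlier sketch and the `client.table_services` attribute
# name; the CORS origin and header values are placeholders, and CorsRule/CorsRules/
# TableServiceProperties are taken from this package version's models.
from azure.mgmt.storage.v2021_06_01.models import CorsRule, CorsRules, TableServiceProperties

client.table_services.set_service_properties(
    "<resource-group>", "<account-name>",
    TableServiceProperties(
        cors=CorsRules(cors_rules=[
            CorsRule(
                allowed_origins=["https://contoso.example"],
                allowed_methods=["GET", "PUT"],
                allowed_headers=["x-ms-meta-*"],
                exposed_headers=["x-ms-meta-*"],
                max_age_in_seconds=300,
            )
        ])
    ),
)
# ----------------------------------------------------------------------------------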
+ :type parameters: ~azure.mgmt.storage.v2021_06_01.models.TableServiceProperties + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TableServiceProperties, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.TableServiceProperties + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.TableServiceProperties"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + table_service_name = "default" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.set_service_properties.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'tableServiceName': self._serialize.url("table_service_name", table_service_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'TableServiceProperties') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('TableServiceProperties', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + set_service_properties.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices/{tableServiceName}'} # type: ignore + + def get_service_properties( + self, + resource_group_name, # type: str + account_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.TableServiceProperties" + """Gets the properties of a storage account’s Table service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param resource_group_name: The name of the resource group within the user's subscription. The + name is case insensitive. + :type resource_group_name: str + :param account_name: The name of the storage account within the specified resource group. 
+ Storage account names must be between 3 and 24 characters in length and use numbers and + lower-case letters only. + :type account_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TableServiceProperties, or the result of cls(response) + :rtype: ~azure.mgmt.storage.v2021_06_01.models.TableServiceProperties + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.TableServiceProperties"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-06-01" + table_service_name = "default" + accept = "application/json" + + # Construct URL + url = self.get_service_properties.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'tableServiceName': self._serialize.url("table_service_name", table_service_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('TableServiceProperties', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_service_properties.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices/{tableServiceName}'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_usages_operations.py b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_usages_operations.py new file mode 100644 index 000000000000..242d70b8225f --- /dev/null +++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/operations/_usages_operations.py @@ -0,0 +1,118 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models as _models
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+
+    T = TypeVar('T')
+    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class UsagesOperations(object):
+    """UsagesOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.storage.v2021_06_01.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer):
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list_by_location(
+        self,
+        location,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.UsageListResult"]
+        """Gets the current usage count and the limit for the resources of the location under the
+        subscription.
+
+        :param location: The location of the Azure Storage resource.
+        :type location: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either UsageListResult or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.v2021_06_01.models.UsageListResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.UsageListResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-06-01"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list_by_location.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+                    'location': self._serialize.url("location", location, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize('UsageListResult', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list_by_location.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Storage/locations/{location}/usages'}  # type: ignore
diff --git a/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/py.typed b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/py.typed
new file mode 100644
index 000000000000..e5aff4f83af8
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_06_01/py.typed
@@ -0,0 +1 @@
+# Marker file for PEP 561.
\ No newline at end of file
diff --git a/sdk/storage/azure-storage-blob/CHANGELOG.md b/sdk/storage/azure-storage-blob/CHANGELOG.md
index 0142c75381fa..6d927065b034 100644
--- a/sdk/storage/azure-storage-blob/CHANGELOG.md
+++ b/sdk/storage/azure-storage-blob/CHANGELOG.md
@@ -1,5 +1,12 @@
 # Release History
 
+## 12.9.0 (2021-09-15)
+**Stable release of preview features**
+- Added support for service version 2020-10-02 (STG78)
+- Added support for object level immutability policy with versioning (Version Level WORM).
+- Added support for listing deleted root blobs that have versions.
+- Added OAuth support for sync copy blob source.
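The 12.9.0 changelog entry above, combined with the `get_api_version` change that follows in this patch, means the blob clients now default to the newest service version in `_SUPPORTED_API_VERSIONS` rather than the generated client's built-in default. The following is only a minimal sketch of the behavior a caller sees; the account URL, credential, and the "2018-03-28" value are placeholders for illustration and are not part of this patch.

```py
from azure.identity import DefaultAzureCredential
from azure.storage.blob import BlobServiceClient

account_url = "https://<my-account>.blob.core.windows.net"  # placeholder
credential = DefaultAzureCredential()  # any supported credential works

# With no explicit api_version, the client targets the newest supported
# service version (2020-10-02 as of this release).
default_client = BlobServiceClient(account_url, credential=credential)
print(default_client.api_version)

# Callers can still pin an older, supported service version explicitly.
pinned_client = BlobServiceClient(
    account_url, credential=credential, api_version="2019-07-07"
)
print(pinned_client.api_version)  # "2019-07-07"

# Values outside _SUPPORTED_API_VERSIONS are rejected up front with a
# ValueError that lists the versions the library supports.
try:
    BlobServiceClient(account_url, credential=credential, api_version="2018-03-28")
except ValueError as error:
    print(error)
```

A likely reason `get_api_version` now reads the keyword with `kwargs.get` instead of `kwargs.pop` is that wrapping clients (the Data Lake clients later in this patch forward their `**kwargs` to the underlying blob clients) need to see the same `api_version` value.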
+ ## 12.9.0b1 (2021-07-27) **New Features** - Added support for object level immutability policy with versioning (Version Level WORM). diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py index e1b1b51e8e78..902c01338b80 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py @@ -176,8 +176,7 @@ def __init__( self._query_str, credential = self._format_query_string(sas_token, credential, snapshot=self.snapshot) super(BlobClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) self._client = AzureBlobStorage(self.url, pipeline=self._pipeline) - default_api_version = self._client._config.version # pylint: disable=protected-access - self._client._config.version = get_api_version(kwargs, default_api_version) # pylint: disable=protected-access + self._client._config.version = get_api_version(kwargs) # pylint: disable=protected-access def _format_url(self, hostname): container_name = self.container_name diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_service_client.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_service_client.py index d277a094921a..33b61202934e 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_service_client.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_service_client.py @@ -134,8 +134,7 @@ def __init__( self._query_str, credential = self._format_query_string(sas_token, credential) super(BlobServiceClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) self._client = AzureBlobStorage(self.url, pipeline=self._pipeline) - default_api_version = self._client._config.version # pylint: disable=protected-access - self._client._config.version = get_api_version(kwargs, default_api_version) # pylint: disable=protected-access + self._client._config.version = get_api_version(kwargs) # pylint: disable=protected-access def _format_url(self, hostname): """Format the endpoint URL according to the current location diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py index 82bfe1b04110..59f17d6cfd9b 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py @@ -156,8 +156,7 @@ def __init__( self._query_str, credential = self._format_query_string(sas_token, credential) super(ContainerClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) self._client = AzureBlobStorage(self.url, pipeline=self._pipeline) - default_api_version = self._client._config.version # pylint: disable=protected-access - self._client._config.version = get_api_version(kwargs, default_api_version) # pylint: disable=protected-access + self._client._config.version = get_api_version(kwargs) # pylint: disable=protected-access def _format_url(self, hostname): container_name = self.container_name diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_serialize.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_serialize.py index 701a88ae408e..d44c5ade481b 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_serialize.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_serialize.py @@ -126,13 +126,13 @@ def get_container_cpk_scope_info(kwargs): return None -def 
get_api_version(kwargs, default): +def get_api_version(kwargs): # type: (Dict[str, Any], str) -> str - api_version = kwargs.pop('api_version', None) + api_version = kwargs.get('api_version', None) if api_version and api_version not in _SUPPORTED_API_VERSIONS: versions = '\n'.join(_SUPPORTED_API_VERSIONS) raise ValueError("Unsupported API version '{}'. Please select from:\n{}".format(api_version, versions)) - return api_version or default + return api_version or _SUPPORTED_API_VERSIONS[-1] def serialize_blob_tags_header(tags=None): diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_version.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_version.py index b21fcd5488bb..68dc9539e3f5 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_version.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_version.py @@ -4,4 +4,4 @@ # license information. # -------------------------------------------------------------------------- -VERSION = "12.9.0b1" +VERSION = "12.9.0" diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py index d75943b97c97..97b412ec1b0f 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py @@ -120,8 +120,7 @@ def __init__( credential=credential, **kwargs) self._client = AzureBlobStorage(url=self.url, pipeline=self._pipeline) - default_api_version = self._client._config.version # pylint: disable=protected-access - self._client._config.version = get_api_version(kwargs, default_api_version) # pylint: disable=protected-access + self._client._config.version = get_api_version(kwargs) # pylint: disable=protected-access @distributed_trace_async async def get_account_information(self, **kwargs): # type: ignore diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_service_client_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_service_client_async.py index d50661d8e2d7..9cb1563f9f43 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_service_client_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_service_client_async.py @@ -118,8 +118,7 @@ def __init__( credential=credential, **kwargs) self._client = AzureBlobStorage(url=self.url, pipeline=self._pipeline) - default_api_version = self._client._config.version # pylint: disable=protected-access - self._client._config.version = get_api_version(kwargs, default_api_version) # pylint: disable=protected-access + self._client._config.version = get_api_version(kwargs) # pylint: disable=protected-access @distributed_trace_async async def get_user_delegation_key(self, key_start_time, # type: datetime diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_container_client_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_container_client_async.py index 794bc55c7f84..2f73b9c077e5 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_container_client_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_container_client_async.py @@ -117,8 +117,7 @@ def __init__( credential=credential, **kwargs) self._client = AzureBlobStorage(url=self.url, pipeline=self._pipeline) - default_api_version = self._client._config.version # pylint: disable=protected-access - self._client._config.version = get_api_version(kwargs, default_api_version) # pylint: 
disable=protected-access + self._client._config.version = get_api_version(kwargs) # pylint: disable=protected-access @distributed_trace_async async def create_container(self, metadata=None, public_access=None, **kwargs): diff --git a/sdk/storage/azure-storage-blob/setup.py b/sdk/storage/azure-storage-blob/setup.py index 41aa96cccedd..dad24446aab3 100644 --- a/sdk/storage/azure-storage-blob/setup.py +++ b/sdk/storage/azure-storage-blob/setup.py @@ -72,7 +72,7 @@ author_email='ascl@microsoft.com', url='https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob', classifiers=[ - 'Development Status :: 4 - Beta', + "Development Status :: 5 - Production/Stable", 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', diff --git a/sdk/storage/azure-storage-file-datalake/CHANGELOG.md b/sdk/storage/azure-storage-file-datalake/CHANGELOG.md index 26b20a054ad1..7dee9165ed9f 100644 --- a/sdk/storage/azure-storage-file-datalake/CHANGELOG.md +++ b/sdk/storage/azure-storage-file-datalake/CHANGELOG.md @@ -1,5 +1,10 @@ # Release History +## 12.5.0 (2021-09-15) +**Stable release of preview features** +- Added support for service version 2020-10-02 (STG78) +- Added support for quick query parquet + ## 12.5.0b1 (2021-07-27) **New features** - Added support for quick query parquet diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_directory_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_directory_client.py index c42391e6071b..042fa0551590 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_directory_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_directory_client.py @@ -527,6 +527,7 @@ def get_file_client(self, file # type: Union[FileProperties, str] ) return DataLakeFileClient( self.url, self.file_system_name, file_path=file_path, credential=self._raw_credential, + api_version=self.api_version, _hosts=self._hosts, _configuration=self._config, _pipeline=self._pipeline, require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, @@ -557,6 +558,7 @@ def get_sub_directory_client(self, sub_directory # type: Union[DirectoryPropert ) return DataLakeDirectoryClient( self.url, self.file_system_name, directory_name=subdir_path, credential=self._raw_credential, + api_version=self.api_version, _hosts=self._hosts, _configuration=self._config, _pipeline=self._pipeline, require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_service_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_service_client.py index 211df05827c2..776b98cda9d6 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_service_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_service_client.py @@ -20,7 +20,8 @@ from ._data_lake_directory_client import DataLakeDirectoryClient from ._data_lake_file_client import DataLakeFileClient from ._models import UserDelegationKey, FileSystemPropertiesPaged, LocationMode -from ._serialize import convert_dfs_url_to_blob_url +from ._serialize import convert_dfs_url_to_blob_url, get_api_version +from ._generated import AzureDataLakeStorageRESTAPI class DataLakeServiceClient(StorageAccountHostsMixin): @@ -94,6 
+95,9 @@ def __init__( # ADLS doesn't support secondary endpoint, make sure it's empty self._hosts[LocationMode.SECONDARY] = "" + self._client = AzureDataLakeStorageRESTAPI(self.url, pipeline=self._pipeline) + self._client._config.version = get_api_version(kwargs) #pylint: disable=protected-access + def __enter__(self): self._blob_service_client.__enter__() return self @@ -388,6 +392,7 @@ def get_file_system_client(self, file_system # type: Union[FileSystemProperties policies=self._pipeline._impl_policies # pylint: disable = protected-access ) return FileSystemClient(self.url, file_system_name, credential=self._raw_credential, + api_version=self.api_version, _configuration=self._config, _pipeline=_pipeline, _hosts=self._hosts, require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, @@ -436,6 +441,7 @@ def get_directory_client(self, file_system, # type: Union[FileSystemProperties, ) return DataLakeDirectoryClient(self.url, file_system_name, directory_name=directory_name, credential=self._raw_credential, + api_version=self.api_version, _configuration=self._config, _pipeline=_pipeline, _hosts=self._hosts, require_encryption=self.require_encryption, @@ -486,6 +492,7 @@ def get_file_client(self, file_system, # type: Union[FileSystemProperties, str] ) return DataLakeFileClient( self.url, file_system_name, file_path=file_path, credential=self._raw_credential, + api_version=self.api_version, _hosts=self._hosts, _configuration=self._config, _pipeline=_pipeline, require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client.py index 94af506b5a3f..148943c12cda 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client.py @@ -19,7 +19,7 @@ from azure.core.paging import ItemPaged from azure.storage.blob import ContainerClient from ._shared.base_client import TransportWrapper, StorageAccountHostsMixin, parse_query, parse_connection_str -from ._serialize import convert_dfs_url_to_blob_url +from ._serialize import convert_dfs_url_to_blob_url, get_api_version from ._list_paths_helper import DeletedPathPropertiesPaged from ._models import LocationMode, FileSystemProperties, PublicAccess, DeletedPathProperties, FileProperties, \ DirectoryProperties @@ -105,9 +105,12 @@ def __init__( # ADLS doesn't support secondary endpoint, make sure it's empty self._hosts[LocationMode.SECONDARY] = "" self._client = AzureDataLakeStorageRESTAPI(self.url, file_system=file_system_name, pipeline=self._pipeline) + api_version = get_api_version(kwargs) + self._client._config.version = api_version # pylint: disable=protected-access self._datalake_client_for_blob_operation = AzureDataLakeStorageRESTAPI(self._container_client.url, file_system=file_system_name, pipeline=self._pipeline) + self._datalake_client_for_blob_operation._config.version = api_version # pylint: disable=protected-access def _format_url(self, hostname): file_system_name = self.file_system_name @@ -839,6 +842,7 @@ def get_directory_client(self, directory # type: Union[DirectoryProperties, str ) return DataLakeDirectoryClient(self.url, self.file_system_name, directory_name=directory_name, credential=self._raw_credential, + api_version=self.api_version, _configuration=self._config, 
_pipeline=_pipeline, _hosts=self._hosts, require_encryption=self.require_encryption, @@ -879,6 +883,7 @@ def get_file_client(self, file_path # type: Union[FileProperties, str] ) return DataLakeFileClient( self.url, self.file_system_name, file_path=file_path, credential=self._raw_credential, + api_version=self.api_version, _hosts=self._hosts, _configuration=self._config, _pipeline=_pipeline, require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py index 8f0f58869fe3..16983811ed5a 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py @@ -22,7 +22,8 @@ from ._models import LocationMode, DirectoryProperties, AccessControlChangeResult, AccessControlChanges, \ AccessControlChangeCounters, AccessControlChangeFailure from ._serialize import convert_dfs_url_to_blob_url, get_mod_conditions, \ - get_path_http_headers, add_metadata_headers, get_lease_id, get_source_mod_conditions, get_access_conditions + get_path_http_headers, add_metadata_headers, get_lease_id, get_source_mod_conditions, get_access_conditions, \ + get_api_version from ._shared.base_client import StorageAccountHostsMixin, parse_query from ._shared.response_handlers import return_response_headers, return_headers_and_deserialized @@ -81,13 +82,18 @@ def __init__( _hosts=datalake_hosts, **kwargs) # ADLS doesn't support secondary endpoint, make sure it's empty self._hosts[LocationMode.SECONDARY] = "" + api_version = get_api_version(kwargs) + self._client = AzureDataLakeStorageRESTAPI(self.url, file_system=file_system_name, path=path_name, pipeline=self._pipeline) + self._client._config.version = api_version # pylint: disable=protected-access + self._datalake_client_for_blob_operation = AzureDataLakeStorageRESTAPI( self._blob_client.url, file_system=file_system_name, path=path_name, pipeline=self._pipeline) + self._datalake_client_for_blob_operation._config.version = api_version # pylint: disable=protected-access def __exit__(self, *args): self._blob_client.close() diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_serialize.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_serialize.py index 9d700bfb029f..304e043629fa 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_serialize.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_serialize.py @@ -9,6 +9,28 @@ SourceModifiedAccessConditions, LeaseAccessConditions +_SUPPORTED_API_VERSIONS = [ + '2019-02-02', + '2019-07-07', + '2019-10-10', + '2019-12-12', + '2020-02-10', + '2020-04-08', + '2020-06-12', + '2020-08-04', + '2020-10-02' +] + + +def get_api_version(kwargs): + # type: (Dict[str, Any], str) -> str + api_version = kwargs.get('api_version', None) + if api_version and api_version not in _SUPPORTED_API_VERSIONS: + versions = '\n'.join(_SUPPORTED_API_VERSIONS) + raise ValueError("Unsupported API version '{}'. 
Please select from:\n{}".format(api_version, versions)) + return api_version or _SUPPORTED_API_VERSIONS[-1] + + def convert_dfs_url_to_blob_url(dfs_account_url): return dfs_account_url.replace('.dfs.', '.blob.', 1) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_version.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_version.py index bfd3230dfa42..d731da5c4072 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_version.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_version.py @@ -4,4 +4,4 @@ # license information. # -------------------------------------------------------------------------- -VERSION = "12.5.0b1" +VERSION = "12.5.0" diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_directory_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_directory_client_async.py index 7d0adefd2680..6254efbbfa9f 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_directory_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_directory_client_async.py @@ -506,6 +506,7 @@ def get_file_client(self, file # type: Union[FileProperties, str] ) return DataLakeFileClient( self.url, self.file_system_name, file_path=file_path, credential=self._raw_credential, + api_version=self.api_version, _hosts=self._hosts, _configuration=self._config, _pipeline=self._pipeline, _location_mode=self._location_mode, require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, @@ -545,6 +546,7 @@ def get_sub_directory_client(self, sub_directory # type: Union[DirectoryPropert ) return DataLakeDirectoryClient( self.url, self.file_system_name, directory_name=subdir_path, credential=self._raw_credential, + api_version=self.api_version, _hosts=self._hosts, _configuration=self._config, _pipeline=self._pipeline, _location_mode=self._location_mode, require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_service_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_service_client_async.py index 76493e89c5b6..44aae0a5e85f 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_service_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_service_client_async.py @@ -10,6 +10,7 @@ from azure.core.pipeline import AsyncPipeline from azure.storage.blob.aio import BlobServiceClient +from .._serialize import get_api_version from .._generated.aio import AzureDataLakeStorageRESTAPI from .._deserialize import get_datalake_service_properties from .._shared.base_client_async import AsyncTransportWrapper, AsyncStorageAccountHostsMixin @@ -80,6 +81,7 @@ def __init__( self._blob_service_client = BlobServiceClient(self._blob_account_url, credential, **kwargs) self._blob_service_client._hosts[LocationMode.SECONDARY] = "" #pylint: disable=protected-access self._client = AzureDataLakeStorageRESTAPI(self.url, pipeline=self._pipeline) + self._client._config.version = get_api_version(kwargs) #pylint: disable=protected-access self._loop = kwargs.get('loop', None) async def __aenter__(self): @@ -337,6 +339,7 @@ def get_file_system_client(self, file_system # type: 
Union[FileSystemProperties policies=self._pipeline._impl_policies # pylint: disable = protected-access ) return FileSystemClient(self.url, file_system_name, credential=self._raw_credential, + api_version=self.api_version, _configuration=self._config, _pipeline=self._pipeline, _hosts=self._hosts, require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, @@ -385,6 +388,7 @@ def get_directory_client(self, file_system, # type: Union[FileSystemProperties, ) return DataLakeDirectoryClient(self.url, file_system_name, directory_name=directory_name, credential=self._raw_credential, + api_version=self.api_version, _configuration=self._config, _pipeline=self._pipeline, _hosts=self._hosts, require_encryption=self.require_encryption, @@ -435,6 +439,7 @@ def get_file_client(self, file_system, # type: Union[FileSystemProperties, str] ) return DataLakeFileClient( self.url, file_system_name, file_path=file_path, credential=self._raw_credential, + api_version=self.api_version, _hosts=self._hosts, _configuration=self._config, _pipeline=self._pipeline, require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_file_system_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_file_system_client_async.py index 02f71424e1a0..90ace3e50697 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_file_system_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_file_system_client_async.py @@ -18,6 +18,7 @@ from azure.core.tracing.decorator_async import distributed_trace_async from azure.storage.blob.aio import ContainerClient +from .._serialize import get_api_version from .._deserialize import process_storage_error, is_file_path from .._generated.models import ListBlobsIncludeItem @@ -98,6 +99,10 @@ def __init__( self._datalake_client_for_blob_operation = AzureDataLakeStorageRESTAPI(self._container_client.url, file_system=file_system_name, pipeline=self._pipeline) + api_version = get_api_version(kwargs) + self._client._config.version = api_version # pylint: disable=protected-access + self._datalake_client_for_blob_operation._config.version = api_version # pylint: disable=protected-access + self._loop = kwargs.get('loop', None) async def __aexit__(self, *args): @@ -787,6 +792,7 @@ def get_directory_client(self, directory # type: Union[DirectoryProperties, str ) return DataLakeDirectoryClient(self.url, self.file_system_name, directory_name=directory_name, credential=self._raw_credential, + api_version=self.api_version, _configuration=self._config, _pipeline=_pipeline, _hosts=self._hosts, require_encryption=self.require_encryption, @@ -828,6 +834,7 @@ def get_file_client(self, file_path # type: Union[FileProperties, str] ) return DataLakeFileClient( self.url, self.file_system_name, file_path=file_path, credential=self._raw_credential, + api_version=self.api_version, _hosts=self._hosts, _configuration=self._config, _pipeline=_pipeline, require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.py index 80c43d77704d..42d783c4350d 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.py +++ 
b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.py @@ -9,6 +9,7 @@ from azure.core.exceptions import AzureError, HttpResponseError from azure.storage.blob.aio import BlobClient +from .._serialize import get_api_version from .._shared.base_client_async import AsyncStorageAccountHostsMixin from .._path_client import PathClient as PathClientBase from .._models import DirectoryProperties, AccessControlChangeResult, AccessControlChangeFailure, \ @@ -53,6 +54,10 @@ def __init__( file_system=file_system_name, path=path_name, pipeline=self._pipeline) + api_version = get_api_version(kwargs) + self._client._config.version = api_version # pylint: disable=protected-access + self._datalake_client_for_blob_operation._config.version = api_version # pylint: disable=protected-access + self._loop = kwargs.get('loop', None) async def __aexit__(self, *args): diff --git a/sdk/storage/azure-storage-file-datalake/setup.py b/sdk/storage/azure-storage-file-datalake/setup.py index 371bef74bbbb..7c4e0eea3688 100644 --- a/sdk/storage/azure-storage-file-datalake/setup.py +++ b/sdk/storage/azure-storage-file-datalake/setup.py @@ -73,7 +73,7 @@ author_email='ascl@microsoft.com', url='https://github.com/Azure/azure-sdk-for-python', classifiers=[ - 'Development Status :: 4 - Beta', + "Development Status :: 5 - Production/Stable", 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_directory.py b/sdk/storage/azure-storage-file-datalake/tests/test_directory.py index 95afd9d4a3c4..f92fd49a1745 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_directory.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_directory.py @@ -16,6 +16,8 @@ generate_file_system_sas, FileSystemSasPermissions from azure.storage.filedatalake import DataLakeServiceClient, generate_directory_sas from azure.storage.filedatalake._models import AccessControlChangeResult, AccessControlChangeCounters +from azure.storage.filedatalake._serialize import _SUPPORTED_API_VERSIONS + from testcase import ( StorageTestCase, @@ -1113,6 +1115,29 @@ def test_using_directory_sas_to_create_file(self, datalake_storage_account_name, with self.assertRaises(HttpResponseError): directory_client.delete_directory() + + @DataLakePreparer() + def test_using_directory_sas_to_create_file(self, datalake_storage_account_name, datalake_storage_account_key): + newest_api_version = _SUPPORTED_API_VERSIONS[-1] + + service_client = DataLakeServiceClient("https://abc.dfs.core.windows.net", credential='fake') + filesys_client = service_client.get_file_system_client("filesys") + dir_client = DataLakeDirectoryClient("https://abc.dfs.core.windows.net", "filesys", "dir", credential='fake') + file_client = dir_client.get_file_client("file") + self.assertEqual(service_client.api_version, newest_api_version) + self.assertEqual(filesys_client.api_version, newest_api_version) + self.assertEqual(dir_client.api_version, newest_api_version) + self.assertEqual(file_client.api_version, newest_api_version) + + service_client2 = DataLakeServiceClient("https://abc.dfs.core.windows.net", credential='fake', api_version="2019-02-02") + filesys_client2 = service_client2.get_file_system_client("filesys") + dir_client2 = DataLakeDirectoryClient("https://abc.dfs.core.windows.net", "filesys", "dir", credential='fake', api_version="2019-02-02") + file_client2 = dir_client2.get_file_client("file") + 
self.assertEqual(service_client2.api_version, "2019-02-02") + self.assertEqual(filesys_client2.api_version, "2019-02-02") + self.assertEqual(dir_client2.api_version, "2019-02-02") + self.assertEqual(file_client2.api_version, "2019-02-02") + # ------------------------------------------------------------------------------ if __name__ == '__main__': unittest.main() diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_directory_async.py b/sdk/storage/azure-storage-file-datalake/tests/test_directory_async.py index 6a2845d39e8c..75b6d0c3d5e9 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_directory_async.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_directory_async.py @@ -21,6 +21,7 @@ from azure.storage.filedatalake import generate_directory_sas from azure.storage.filedatalake.aio import DataLakeServiceClient, DataLakeDirectoryClient from azure.storage.filedatalake import AccessControlChangeResult, AccessControlChangeCounters +from azure.storage.filedatalake._serialize import _SUPPORTED_API_VERSIONS from asynctestcase import ( StorageTestCase, @@ -1086,6 +1087,29 @@ async def test_using_directory_sas_to_create_async(self, datalake_storage_accoun response = await directory_client.create_directory() self.assertIsNotNone(response) + @DataLakePreparer() + def test_using_directory_sas_to_create_file(self, datalake_storage_account_name, datalake_storage_account_key): + newest_api_version = _SUPPORTED_API_VERSIONS[-1] + + service_client = DataLakeServiceClient("https://abc.dfs.core.windows.net", credential='fake') + filesys_client = service_client.get_file_system_client("filesys") + dir_client = DataLakeDirectoryClient("https://abc.dfs.core.windows.net", "filesys", "dir", credential='fake') + file_client = dir_client.get_file_client("file") + self.assertEqual(service_client.api_version, newest_api_version) + self.assertEqual(filesys_client.api_version, newest_api_version) + self.assertEqual(dir_client.api_version, newest_api_version) + self.assertEqual(file_client.api_version, newest_api_version) + + service_client2 = DataLakeServiceClient("https://abc.dfs.core.windows.net", credential='fake', + api_version="2019-02-02") + filesys_client2 = service_client2.get_file_system_client("filesys") + dir_client2 = DataLakeDirectoryClient("https://abc.dfs.core.windows.net", "filesys", "dir", credential='fake', + api_version="2019-02-02") + file_client2 = dir_client2.get_file_client("file") + self.assertEqual(service_client2.api_version, "2019-02-02") + self.assertEqual(filesys_client2.api_version, "2019-02-02") + self.assertEqual(dir_client2.api_version, "2019-02-02") + self.assertEqual(file_client2.api_version, "2019-02-02") # ------------------------------------------------------------------------------ if __name__ == '__main__': diff --git a/sdk/storage/azure-storage-file-share/CHANGELOG.md b/sdk/storage/azure-storage-file-share/CHANGELOG.md index 38fb1e05b25d..e61e3980d687 100644 --- a/sdk/storage/azure-storage-file-share/CHANGELOG.md +++ b/sdk/storage/azure-storage-file-share/CHANGELOG.md @@ -1,5 +1,10 @@ # Release History +## 12.6.0 (2021-09-15) +**Stable release of preview features** +- Added support for service version 2020-10-02 (STG78) +- Added OAuth support for file copy source. + ## 12.6.0b1 (2021-07-27) **New features** - Added OAuth support for file copy source. 
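The new Data Lake tests above assert that a pinned `api_version` now flows from a parent client into every sub-client created from it, which is what the `api_version=self.api_version` additions throughout the `get_*_client` factory methods provide. Below is a minimal sketch of that propagation; the endpoint, credential, and resource names are placeholders, not values from this patch.

```py
from azure.storage.filedatalake import DataLakeServiceClient

# Placeholder endpoint and credential (an account key or SAS string also works here).
service_client = DataLakeServiceClient(
    "https://<my-account>.dfs.core.windows.net",
    credential="<account-key-or-sas>",
    api_version="2019-02-02",
)

# Sub-clients created through the factory methods inherit the parent's pinned
# version instead of silently resetting to the library default.
file_system_client = service_client.get_file_system_client("my-filesystem")
directory_client = file_system_client.get_directory_client("my-directory")
file_client = directory_client.get_file_client("my-file")

assert service_client.api_version == "2019-02-02"
assert file_system_client.api_version == "2019-02-02"
assert directory_client.api_version == "2019-02-02"
assert file_client.api_version == "2019-02-02"
```

The async Data Lake clients receive the same forwarding, and the file share changes that follow switch their clients to the same `get_api_version(kwargs)` default of `_SUPPORTED_API_VERSIONS[-1]`.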
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_directory_client.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_directory_client.py index 7f8b22d5a56d..1c9d5f176ebd 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_directory_client.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_directory_client.py @@ -118,8 +118,7 @@ def __init__( # type: ignore sas_token, credential, share_snapshot=self.snapshot) super(ShareDirectoryClient, self).__init__(parsed_url, service='file-share', credential=credential, **kwargs) self._client = AzureFileStorage(url=self.url, pipeline=self._pipeline) - default_api_version = self._client._config.version # pylint: disable=protected-access - self._client._config.version = get_api_version(kwargs, default_api_version) # pylint: disable=protected-access + self._client._config.version = get_api_version(kwargs) # pylint: disable=protected-access @classmethod def from_directory_url(cls, directory_url, # type: str diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.py index 24d731fe7885..5f8f979d0376 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.py @@ -172,8 +172,7 @@ def __init__( # type: ignore sas_token, credential, share_snapshot=self.snapshot) super(ShareFileClient, self).__init__(parsed_url, service='file-share', credential=credential, **kwargs) self._client = AzureFileStorage(url=self.url, pipeline=self._pipeline) - default_api_version = self._client._config.version # pylint: disable=protected-access - self._client._config.version = get_api_version(kwargs, default_api_version) # pylint: disable=protected-access + self._client._config.version = get_api_version(kwargs) # pylint: disable=protected-access @classmethod def from_file_url( diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_serialize.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_serialize.py index 6fa74259f62c..9a050dae3339 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_serialize.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_serialize.py @@ -14,6 +14,7 @@ _SUPPORTED_API_VERSIONS = [ '2019-02-02', '2019-07-07', + '2019-10-10', '2019-12-12', '2020-02-10', '2020-04-08', @@ -108,10 +109,11 @@ def get_smb_properties(kwargs): } -def get_api_version(kwargs, default): + +def get_api_version(kwargs): # type: (Dict[str, Any]) -> str - api_version = kwargs.pop('api_version', None) + api_version = kwargs.get('api_version', None) if api_version and api_version not in _SUPPORTED_API_VERSIONS: versions = '\n'.join(_SUPPORTED_API_VERSIONS) raise ValueError("Unsupported API version '{}'. 
Please select from:\n{}".format(api_version, versions)) - return api_version or default + return api_version or _SUPPORTED_API_VERSIONS[-1] diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_client.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_client.py index 3d9f707973b9..a0b317bde7ef 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_client.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_client.py @@ -115,8 +115,7 @@ def __init__( # type: ignore sas_token, credential, share_snapshot=self.snapshot) super(ShareClient, self).__init__(parsed_url, service='file-share', credential=credential, **kwargs) self._client = AzureFileStorage(url=self.url, pipeline=self._pipeline) - default_api_version = self._client._config.version # pylint: disable=protected-access - self._client._config.version = get_api_version(kwargs, default_api_version) # pylint: disable=protected-access + self._client._config.version = get_api_version(kwargs) # pylint: disable=protected-access @classmethod def from_share_url(cls, share_url, # type: str diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_service_client.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_service_client.py index 3f2de840b603..537a0e5ee2fc 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_service_client.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_service_client.py @@ -105,8 +105,7 @@ def __init__( self._query_str, credential = self._format_query_string(sas_token, credential) super(ShareServiceClient, self).__init__(parsed_url, service='file-share', credential=credential, **kwargs) self._client = AzureFileStorage(url=self.url, pipeline=self._pipeline) - default_api_version = self._client._config.version # pylint: disable=protected-access - self._client._config.version = get_api_version(kwargs, default_api_version) # pylint: disable=protected-access + self._client._config.version = get_api_version(kwargs) # pylint: disable=protected-access def _format_url(self, hostname): """Format the endpoint URL according to the current location diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_version.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_version.py index 202620c82c6d..c9d0e6003207 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_version.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_version.py @@ -4,4 +4,4 @@ # license information. 
# -------------------------------------------------------------------------- -VERSION = "12.6.0b1" +VERSION = "12.6.0" diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_directory_client_async.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_directory_client_async.py index 8da17a039362..ae7767da2a60 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_directory_client_async.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_directory_client_async.py @@ -90,8 +90,7 @@ def __init__( # type: ignore loop=loop, **kwargs) self._client = AzureFileStorage(url=self.url, pipeline=self._pipeline, loop=loop) - default_api_version = self._client._config.version # pylint: disable=protected-access - self._client._config.version = get_api_version(kwargs, default_api_version) # pylint: disable=protected-access + self._client._config.version = get_api_version(kwargs) # pylint: disable=protected-access self._loop = loop def get_file_client(self, file_name, **kwargs): diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_file_client_async.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_file_client_async.py index a5c936efc4b5..f4ee50fb1aad 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_file_client_async.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_file_client_async.py @@ -137,8 +137,7 @@ def __init__( # type: ignore credential=credential, loop=loop, **kwargs ) self._client = AzureFileStorage(url=self.url, pipeline=self._pipeline, loop=loop) - default_api_version = self._client._config.version # pylint: disable=protected-access - self._client._config.version = get_api_version(kwargs, default_api_version) # pylint: disable=protected-access + self._client._config.version = get_api_version(kwargs) # pylint: disable=protected-access self._loop = loop @distributed_trace_async diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_client_async.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_client_async.py index 0d3d61778400..f8ded64403b5 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_client_async.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_client_async.py @@ -85,8 +85,7 @@ def __init__( # type: ignore loop=loop, **kwargs) self._client = AzureFileStorage(url=self.url, pipeline=self._pipeline, loop=loop) - default_api_version = self._client._config.version # pylint: disable=protected-access - self._client._config.version = get_api_version(kwargs, default_api_version) # pylint: disable=protected-access + self._client._config.version = get_api_version(kwargs) # pylint: disable=protected-access self._loop = loop def get_directory_client(self, directory_path=None): diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_service_client_async.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_service_client_async.py index d6df0334d5c1..dc9ef0823a3f 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_service_client_async.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_service_client_async.py @@ -90,8 +90,7 @@ def __init__( loop=loop, **kwargs) self._client = AzureFileStorage(url=self.url, pipeline=self._pipeline, loop=loop) - default_api_version = 
self._client._config.version # pylint: disable=protected-access - self._client._config.version = get_api_version(kwargs, default_api_version) # pylint: disable=protected-access + self._client._config.version = get_api_version(kwargs) # pylint: disable=protected-access self._loop = loop @distributed_trace_async diff --git a/sdk/storage/azure-storage-file-share/setup.py b/sdk/storage/azure-storage-file-share/setup.py index af2e8d3ebf1a..752f8c5df2af 100644 --- a/sdk/storage/azure-storage-file-share/setup.py +++ b/sdk/storage/azure-storage-file-share/setup.py @@ -59,7 +59,7 @@ author_email='ascl@microsoft.com', url='https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-file-share', classifiers=[ - 'Development Status :: 4 - Beta', + "Development Status :: 5 - Production/Stable", 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_client.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_client.py index 14ca7acf62d9..677105147ebe 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_client.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_client.py @@ -19,6 +19,7 @@ from azure.core.exceptions import HttpResponseError from azure.core.paging import ItemPaged from azure.core.tracing.decorator import distributed_trace +from ._serialize import get_api_version from ._shared.base_client import StorageAccountHostsMixin, parse_connection_str, parse_query from ._shared.request_handlers import add_metadata_headers, serialize_iso from ._shared.response_handlers import ( @@ -105,8 +106,7 @@ def __init__( self._config.message_encode_policy = kwargs.get('message_encode_policy', None) or NoEncodePolicy() self._config.message_decode_policy = kwargs.get('message_decode_policy', None) or NoDecodePolicy() self._client = AzureQueueStorage(self.url, pipeline=self._pipeline) - default_api_version = self._client._config.version # pylint: disable=protected-access - self._client._config.version = kwargs.get('api_version', default_api_version) # pylint: disable=protected-access + self._client._config.version = get_api_version(kwargs) # pylint: disable=protected-access def _format_url(self, hostname): """Format the endpoint URL according to the current location diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_service_client.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_service_client.py index cdaf88032695..ee4595fb74eb 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_service_client.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_service_client.py @@ -17,6 +17,7 @@ from azure.core.paging import ItemPaged from azure.core.pipeline import Pipeline from azure.core.tracing.decorator import distributed_trace +from ._serialize import get_api_version from ._shared.models import LocationMode from ._shared.base_client import StorageAccountHostsMixin, TransportWrapper, parse_connection_str, parse_query from ._shared.response_handlers import process_storage_error @@ -108,8 +109,7 @@ def __init__( self._query_str, credential = self._format_query_string(sas_token, credential) super(QueueServiceClient, self).__init__(parsed_url, service='queue', credential=credential, **kwargs) self._client = AzureQueueStorage(self.url, pipeline=self._pipeline) - default_api_version = self._client._config.version # pylint: disable=protected-access - 
self._client._config.version = kwargs.get('api_version', default_api_version) # pylint: disable=protected-access + self._client._config.version = get_api_version(kwargs) # pylint: disable=protected-access def _format_url(self, hostname): """Format the endpoint URL according to the current location diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_serialize.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_serialize.py new file mode 100644 index 000000000000..6aafbc170bec --- /dev/null +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_serialize.py @@ -0,0 +1,26 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +_SUPPORTED_API_VERSIONS = [ + '2019-02-02', + '2019-07-07', + '2019-10-10', + '2019-12-12', + '2020-02-10', + '2020-04-08', + '2020-06-12', + '2020-08-04', + '2020-10-02' +] + + +def get_api_version(kwargs): + # type: (Dict[str, Any], str) -> str + api_version = kwargs.get('api_version', None) + if api_version and api_version not in _SUPPORTED_API_VERSIONS: + versions = '\n'.join(_SUPPORTED_API_VERSIONS) + raise ValueError("Unsupported API version '{}'. Please select from:\n{}".format(api_version, versions)) + return api_version or _SUPPORTED_API_VERSIONS[-1] diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_queue_client_async.py b/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_queue_client_async.py index 34ecba6b6fe2..4b8f3f0c4282 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_queue_client_async.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_queue_client_async.py @@ -31,6 +31,7 @@ from azure.core.async_paging import AsyncItemPaged +from .._serialize import get_api_version from .._shared.base_client_async import AsyncStorageAccountHostsMixin from .._shared.request_handlers import add_metadata_headers, serialize_iso from .._shared.response_handlers import ( @@ -111,8 +112,7 @@ def __init__( account_url, queue_name=queue_name, credential=credential, loop=loop, **kwargs ) self._client = AzureQueueStorage(self.url, pipeline=self._pipeline, loop=loop) # type: ignore - default_api_version = self._client._config.version # pylint: disable=protected-access - self._client._config.version = kwargs.get('api_version', default_api_version) # pylint: disable=protected-access + self._client._config.version = get_api_version(kwargs) # pylint: disable=protected-access self._loop = loop @distributed_trace_async diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_queue_service_client_async.py b/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_queue_service_client_async.py index 805ed41b7cc5..ee0fc0f8af22 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_queue_service_client_async.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_queue_service_client_async.py @@ -20,6 +20,7 @@ from azure.core.pipeline import AsyncPipeline from azure.core.tracing.decorator_async import distributed_trace_async +from .._serialize import get_api_version from .._shared.policies_async import ExponentialRetry from .._queue_service_client import QueueServiceClient as QueueServiceClientBase from .._shared.models import LocationMode @@ -101,8 +102,7 @@ def __init__( loop=loop, **kwargs) self._client = 
AzureQueueStorage(url=self.url, pipeline=self._pipeline, loop=loop) # type: ignore - default_api_version = self._client._config.version # pylint: disable=protected-access - self._client._config.version = kwargs.get('api_version', default_api_version) # pylint: disable=protected-access + self._client._config.version = get_api_version(kwargs) # pylint: disable=protected-access self._loop = loop @distributed_trace_async diff --git a/sdk/storage/azure-storage-queue/tests/test_queue_api_version.py b/sdk/storage/azure-storage-queue/tests/test_queue_api_version.py index f03aaa5b2c68..55fc24674b21 100644 --- a/sdk/storage/azure-storage-queue/tests/test_queue_api_version.py +++ b/sdk/storage/azure-storage-queue/tests/test_queue_api_version.py @@ -13,6 +13,7 @@ QueueServiceClient, QueueClient ) +from azure.storage.queue._serialize import _SUPPORTED_API_VERSIONS from _shared.testcase import GlobalStorageAccountPreparer, StorageTestCase # ------------------------------------------------------------------------------ @@ -21,7 +22,7 @@ class StorageClientTest(StorageTestCase): def setUp(self): super(StorageClientTest, self).setUp() self.api_version_1 = "2019-02-02" - self.api_version_2 = '2018-03-28' + self.api_version_2 = _SUPPORTED_API_VERSIONS[-1] # --Test Cases-------------------------------------------------------------- @@ -46,7 +47,7 @@ def test_service_client_api_version_property(self): self.assertEqual(queue_client.api_version, self.api_version_1) self.assertEqual(queue_client._client._config.version, self.api_version_1) - def test_blob_client_api_version_property(self): + def test_queue_client_api_version_property(self): queue_client = QueueClient( "https://foo.queue.core.windows.net/account", "queue_name", diff --git a/sdk/storage/azure-storage-queue/tests/test_queue_api_version_async.py b/sdk/storage/azure-storage-queue/tests/test_queue_api_version_async.py index 6433f57fa043..b397846355e7 100644 --- a/sdk/storage/azure-storage-queue/tests/test_queue_api_version_async.py +++ b/sdk/storage/azure-storage-queue/tests/test_queue_api_version_async.py @@ -13,6 +13,7 @@ QueueServiceClient, QueueClient ) +from azure.storage.queue._serialize import _SUPPORTED_API_VERSIONS from _shared.testcase import GlobalStorageAccountPreparer from _shared.asynctestcase import AsyncStorageTestCase @@ -22,7 +23,7 @@ class StorageClientTest(AsyncStorageTestCase): def setUp(self): super(StorageClientTest, self).setUp() self.api_version_1 = "2019-02-02" - self.api_version_2 = '2018-03-28' + self.api_version_2 = _SUPPORTED_API_VERSIONS[-1] # --Test Cases-------------------------------------------------------------- @@ -47,7 +48,7 @@ def test_service_client_api_version_property(self): self.assertEqual(queue_client.api_version, self.api_version_1) self.assertEqual(queue_client._client._config.version, self.api_version_1) - def test_blob_client_api_version_property(self): + def test_queue_client_api_version_property(self): queue_client = QueueClient( "https://foo.queue.core.windows.net/account", "queue_name", diff --git a/sdk/storage/azure-storage-queue/tests/test_queue_client.py b/sdk/storage/azure-storage-queue/tests/test_queue_client.py index 75fe784dbf5a..5bea41109afe 100644 --- a/sdk/storage/azure-storage-queue/tests/test_queue_client.py +++ b/sdk/storage/azure-storage-queue/tests/test_queue_client.py @@ -12,6 +12,7 @@ QueueServiceClient, QueueClient, ) +from azure.storage.queue._serialize import _SUPPORTED_API_VERSIONS from _shared.testcase import GlobalStorageAccountPreparer, StorageTestCase # 
------------------------------------------------------------------------------ @@ -484,6 +485,8 @@ def test_closing_pipeline_client_simple(self, resource_group, location, storage_ service = client( self.account_url(storage_account, "queue"), credential=storage_account_key, queue_name='queue') service.close() + + # ------------------------------------------------------------------------------ if __name__ == '__main__': unittest.main() diff --git a/sdk/storage/azure-storage-queue/tests/test_queue_client_async.py b/sdk/storage/azure-storage-queue/tests/test_queue_client_async.py index 9710c3eb3305..d2c91d8c29b8 100644 --- a/sdk/storage/azure-storage-queue/tests/test_queue_client_async.py +++ b/sdk/storage/azure-storage-queue/tests/test_queue_client_async.py @@ -15,6 +15,7 @@ QueueServiceClient, QueueClient ) +from azure.storage.queue._serialize import _SUPPORTED_API_VERSIONS from _shared.asynctestcase import AsyncStorageTestCase from _shared.testcase import GlobalStorageAccountPreparer # ------------------------------------------------------------------------------ @@ -482,6 +483,7 @@ async def test_closing_pipeline_client_simple_async(self, resource_group, locati self.account_url(storage_account, "queue"), credential=storage_account_key, queue_name='queue') await service.close() + # ------------------------------------------------------------------------------ if __name__ == '__main__': unittest.main() diff --git a/shared_requirements.txt b/shared_requirements.txt index 706088d47b13..adb8d8c77f7c 100644 --- a/shared_requirements.txt +++ b/shared_requirements.txt @@ -346,3 +346,4 @@ opentelemetry-sdk<2.0.0,>=1.0.0 #override azure-mgmt-security msrest>=0.6.21 #override azure-mgmt-apimanagement msrest>=0.6.21 #override azure-mgmt-relay msrest>=0.6.21 +#override azure-mgmt-azurearcdata msrest>=0.6.21 diff --git a/tools/azure-sdk-tools/devtools_testutils/__init__.py b/tools/azure-sdk-tools/devtools_testutils/__init__.py index 69db6f41dfb3..c58ea74788c2 100644 --- a/tools/azure-sdk-tools/devtools_testutils/__init__.py +++ b/tools/azure-sdk-tools/devtools_testutils/__init__.py @@ -1,5 +1,5 @@ from .mgmt_testcase import AzureMgmtTestCase, AzureMgmtPreparer -from .azure_recorded_testcase import AzureRecordedTestCase +from .azure_recorded_testcase import add_sanitizer, AzureRecordedTestCase from .azure_testcase import AzureTestCase, is_live, get_region_override from .resource_testcase import ( FakeResource, @@ -21,6 +21,7 @@ from .fake_credential import FakeTokenCredential __all__ = [ + "add_sanitizer", "AzureMgmtTestCase", "AzureMgmtPreparer", "AzureRecordedTestCase", diff --git a/tools/azure-sdk-tools/devtools_testutils/azure_recorded_testcase.py b/tools/azure-sdk-tools/devtools_testutils/azure_recorded_testcase.py index d39cfe79960c..96452251da3d 100644 --- a/tools/azure-sdk-tools/devtools_testutils/azure_recorded_testcase.py +++ b/tools/azure-sdk-tools/devtools_testutils/azure_recorded_testcase.py @@ -31,12 +31,42 @@ pass if TYPE_CHECKING: - from typing import Optional + from typing import Any load_dotenv(find_dotenv()) +def add_sanitizer(sanitizer, **kwargs): + # type: (ProxyRecordingSanitizer, **Any) -> None + """Registers a sanitizer, matcher, or transform with the test proxy. + + :param sanitizer: The name of the sanitizer, matcher, or transform you want to add. + :type sanitizer: ProxyRecordingSanitizer or str + + :keyword str value: The substitution value. + :keyword str regex: A regex for a sanitizer. 
Can be defined as a simple regex, or if a ``group_for_replace`` is + provided, a substitution operation. + :keyword str group_for_replace: The capture group that needs to be operated upon. Do not provide if you're invoking + a simple replacement operation. + """ + request_args = {} + request_args["value"] = kwargs.get("value") or "fakevalue" + request_args["regex"] = kwargs.get("regex") or "[a-z]+(?=(?:-secondary)\\.(?:table|blob|queue)\\.core\\.windows\\.net)" + request_args["group_for_replace"] = kwargs.get("group_for_replace") + + if sanitizer == ProxyRecordingSanitizer.URI: + requests.post( + "{}/Admin/AddSanitizer".format(PROXY_URL), + headers={"x-abstraction-identifier": ProxyRecordingSanitizer.URI.value}, + json={ + "regex": request_args["regex"], + "value": request_args["value"], + "groupForReplace": request_args["group_for_replace"] + }, + ) + + def is_live(): """A module version of is_live, that could be used in pytest marker.""" if not hasattr(is_live, "_cache"): @@ -81,18 +111,6 @@ def in_recording(self): def recording_processors(self): return [] - def add_sanitizer(self, sanitizer, regex=None, value=None): - # type: (ProxyRecordingSanitizer, Optional[str], Optional[str]) -> None - if sanitizer == ProxyRecordingSanitizer.URI: - requests.post( - "{}/Admin/AddSanitizer".format(PROXY_URL), - headers={"x-abstraction-identifier": ProxyRecordingSanitizer.URI.value}, - json={ - "regex": regex or "[a-z]+(?=(?:-secondary)\\.(?:table|blob|queue)\\.core\\.windows\\.net)", - "value": value or "fakevalue" - }, - ) - def is_playback(self): return not self.is_live diff --git a/tools/azure-sdk-tools/devtools_testutils/proxy_testcase.py b/tools/azure-sdk-tools/devtools_testutils/proxy_testcase.py index 53cc41bf9064..78918ab0063b 100644 --- a/tools/azure-sdk-tools/devtools_testutils/proxy_testcase.py +++ b/tools/azure-sdk-tools/devtools_testutils/proxy_testcase.py @@ -24,6 +24,10 @@ from .config import PROXY_URL +# To learn about how to migrate SDK tests to the test proxy, please refer to the migration guide at +# https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/test_proxy_migration_guide.md + + # defaults RECORDING_START_URL = "{}/record/start".format(PROXY_URL) RECORDING_STOP_URL = "{}/record/stop".format(PROXY_URL) diff --git a/tools/azure-sdk-tools/packaging_tools/change_log.py b/tools/azure-sdk-tools/packaging_tools/change_log.py index 64d2ebe0b6b2..10fc7ae47fff 100644 --- a/tools/azure-sdk-tools/packaging_tools/change_log.py +++ b/tools/azure-sdk-tools/packaging_tools/change_log.py @@ -142,7 +142,8 @@ def models(self, diff_entry): def build_change_log(old_report, new_report): change_log = ChangeLog(old_report, new_report) - result = diff(old_report, new_report) + # when diff result is large, compare_lengths=True may cause wrong result + result = diff(old_report, new_report, compare_lengths=False) for diff_line in result: # Operations