From 9a69d40a7fd08fadf71b05c70a2a12cb5bcef172 Mon Sep 17 00:00:00 2001
From: SDKAuto
Date: Mon, 29 Mar 2021 06:01:37 +0000
Subject: [PATCH] CodeGen from PR 13689 in Azure/azure-rest-api-specs

Dev sql microsoft.sql 2020 11 01 preview minor change (#13689)

* Adds base for updating Microsoft.Sql from version preview/2020-08-01-preview to version 2020-11-01-preview
* Updates readme
* Updates API version in new specs and examples
* Add updated 2020-11-01-preview versions of LongTermRetentionBackups.json and Databases.json (#12146)
* add updated databases.json swagger
* update readme with new package
* v4 -> v5
* add missing chunk in readme
* add correct swagger
* add examples
* add correct examples
* remove unreferenced examples
* update swagger and examples. add 2020-11 pure package
* Fix reference to 2020-08-01 ElasticPools.json
* update Databases.json name
* update examples
* remove nonexistent swagger
* fixed reference capitals in readme
* update package composite and fix ref to ManagedInstances.json
* make backupstorageredundancy settings consistent
* add requestedBackupStorageRedundancy to properties object in example
* add LTR swagger and examples
* add LTR Policies swagger and examples
* make v5 match v4
* add updated Databases.json
* merge conflicts in LTR Policies swagger
* take out comments in readme
* taking ImportExport.json out of the 2020-11 package
* remove duplicate 2020-11 tag in readme
* modify Databases_legacy.json to exclude ImportExport-related APIs
* add servers.json for IE APIs and remove unused databases examples
* fix model & prettier errors; add missing server example
* add missing servers examples
* fix example to exclude property from server obj
* change RecommendedActions definitions
* change tag to composite v4
* Add missing properties for api version 2020-02-02-preview of securityAlertPolicies (#13117)
* Add missing properties for api version 2020-02-02-preview
* fix checks
* fix according to prettier check
* Add the change to version 2020-08-01-preview and version 2020-11-01-preview
* remove changes in package-lock
* Add new line at the end of file
* update version 2020-11-01-preview
* Update the ref to system data in serverdevopsaudit
* add missing point
* fix the validation errors
* Delete ServerDevOpsAudit.json
* fix the validation errors
* re-add ServerDevOpsAudit.json
* fix the validation errors (#13325)
* fix the validation errors
* Delete ServerDevOpsAudit.json
* fix the validation errors
* re-add ServerDevOpsAudit.json
* address PrivateEndpointConnections.json validation issues in v3, v2 and v1
* Address Private Endpoint validation issues in V3, V2 and V1 SDK tags (#13404)
* fix the validation errors
* Delete ServerDevOpsAudit.json
* fix the validation errors
* re-add ServerDevOpsAudit.json
* address PrivateEndpointConnections.json validation issues in v3, v2 and v1
* Identity/Key/external Admin API (#13411)
* Add examples for sql mi remove maintenance configuration (#13390)
* Add missing specs 2020 11 01 (#13488)
* fix the validation errors
* Delete ServerDevOpsAudit.json
* fix the validation errors
* re-add ServerDevOpsAudit.json
* address PrivateEndpointConnections.json validation issues in v3, v2 and v1
* add missing specs in 2020-11-01-preview
* update the validation errors
* update systemData and sync with global common/v1/types.json
* update legacy to match the latest database specs
* remove RestorableDroppedManagedInstances because it should be internal
* Updated database extensions (#13584)

Co-authored-by: Geetha Athreya
Co-authored-by: xaliciayang <59986952+xaliciayang@users.noreply.github.com> Co-authored-by: ayeletshpigelman Co-authored-by: strehan1993 <70013163+strehan1993@users.noreply.github.com> Co-authored-by: Ivan Kulezic <72797230+kukislav@users.noreply.github.com> Co-authored-by: Geetha Athreya <43223869+athreya-geetha@users.noreply.github.com> Co-authored-by: Geetha Athreya --- .../azure/mgmt/sql/_sql_management_client.py | 35 +- .../azure/mgmt/sql/models/__init__.py | 65 +- .../azure/mgmt/sql/models/_models.py | 345 +++++++--- .../azure/mgmt/sql/models/_models_py3.py | 353 +++++++--- .../azure/mgmt/sql/models/_paged_models.py | 65 +- .../models/_sql_management_client_enums.py | 116 ++-- .../azure/mgmt/sql/operations/__init__.py | 14 +- .../sql/operations/_databases_operations.py | 457 ++++--------- .../operations/_import_export_operations.py | 150 +++++ ..._long_term_retention_backups_operations.py | 628 +++++++++++++++--- ...ion_managed_instance_backups_operations.py | 12 +- ...ong_term_retention_policies_operations.py} | 128 ++-- ...private_endpoint_connections_operations.py | 4 +- .../sql/operations/_servers_operations.py | 135 +++- 14 files changed, 1663 insertions(+), 844 deletions(-) create mode 100644 sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/_import_export_operations.py rename sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/{_backup_long_term_retention_policies_operations.py => _long_term_retention_policies_operations.py} (75%) diff --git a/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/_sql_management_client.py b/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/_sql_management_client.py index 29c37824309d..d4fe13321ace 100644 --- a/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/_sql_management_client.py +++ b/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/_sql_management_client.py @@ -58,8 +58,6 @@ from .operations import JobTargetExecutionsOperations from .operations import JobTargetGroupsOperations from .operations import JobVersionsOperations -from .operations import LongTermRetentionBackupsOperations -from .operations import BackupLongTermRetentionPoliciesOperations from .operations import ManagedBackupShortTermRetentionPoliciesOperations from .operations import ManagedRestorableDroppedDatabaseBackupShortTermRetentionPoliciesOperations from .operations import ServerAutomaticTuningOperations @@ -89,7 +87,6 @@ from .operations import ManagedDatabaseSensitivityLabelsOperations from .operations import InstancePoolsOperations from .operations import UsagesOperations -from .operations import PrivateEndpointConnectionsOperations from .operations import PrivateLinkResourcesOperations from .operations import ServersOperations from .operations import CapabilitiesOperations @@ -101,6 +98,7 @@ from .operations import ServerAzureADAdministratorsOperations from .operations import SyncGroupsOperations from .operations import SyncMembersOperations +from .operations import ImportExportOperations from .operations import ManagedDatabasesOperations from .operations import ManagedDatabaseRestoreDetailsOperations from .operations import ServerAzureADOnlyAuthenticationsOperations @@ -108,6 +106,9 @@ from .operations import ManagedInstanceAzureADOnlyAuthenticationsOperations from .operations import ServerTrustGroupsOperations from .operations import ServerDevOpsAuditSettingsOperations +from .operations import LongTermRetentionBackupsOperations +from .operations import LongTermRetentionPoliciesOperations +from .operations import PrivateEndpointConnectionsOperations from . 
import models @@ -207,10 +208,6 @@ class SqlManagementClient(SDKClient): :vartype job_target_groups: azure.mgmt.sql.operations.JobTargetGroupsOperations :ivar job_versions: JobVersions operations :vartype job_versions: azure.mgmt.sql.operations.JobVersionsOperations - :ivar long_term_retention_backups: LongTermRetentionBackups operations - :vartype long_term_retention_backups: azure.mgmt.sql.operations.LongTermRetentionBackupsOperations - :ivar backup_long_term_retention_policies: BackupLongTermRetentionPolicies operations - :vartype backup_long_term_retention_policies: azure.mgmt.sql.operations.BackupLongTermRetentionPoliciesOperations :ivar managed_backup_short_term_retention_policies: ManagedBackupShortTermRetentionPolicies operations :vartype managed_backup_short_term_retention_policies: azure.mgmt.sql.operations.ManagedBackupShortTermRetentionPoliciesOperations :ivar managed_restorable_dropped_database_backup_short_term_retention_policies: ManagedRestorableDroppedDatabaseBackupShortTermRetentionPolicies operations @@ -269,8 +266,6 @@ class SqlManagementClient(SDKClient): :vartype instance_pools: azure.mgmt.sql.operations.InstancePoolsOperations :ivar usages: Usages operations :vartype usages: azure.mgmt.sql.operations.UsagesOperations - :ivar private_endpoint_connections: PrivateEndpointConnections operations - :vartype private_endpoint_connections: azure.mgmt.sql.operations.PrivateEndpointConnectionsOperations :ivar private_link_resources: PrivateLinkResources operations :vartype private_link_resources: azure.mgmt.sql.operations.PrivateLinkResourcesOperations :ivar servers: Servers operations @@ -293,6 +288,8 @@ class SqlManagementClient(SDKClient): :vartype sync_groups: azure.mgmt.sql.operations.SyncGroupsOperations :ivar sync_members: SyncMembers operations :vartype sync_members: azure.mgmt.sql.operations.SyncMembersOperations + :ivar import_export: ImportExport operations + :vartype import_export: azure.mgmt.sql.operations.ImportExportOperations :ivar managed_databases: ManagedDatabases operations :vartype managed_databases: azure.mgmt.sql.operations.ManagedDatabasesOperations :ivar managed_database_restore_details: ManagedDatabaseRestoreDetails operations @@ -307,6 +304,12 @@ class SqlManagementClient(SDKClient): :vartype server_trust_groups: azure.mgmt.sql.operations.ServerTrustGroupsOperations :ivar server_dev_ops_audit_settings: ServerDevOpsAuditSettings operations :vartype server_dev_ops_audit_settings: azure.mgmt.sql.operations.ServerDevOpsAuditSettingsOperations + :ivar long_term_retention_backups: LongTermRetentionBackups operations + :vartype long_term_retention_backups: azure.mgmt.sql.operations.LongTermRetentionBackupsOperations + :ivar long_term_retention_policies: LongTermRetentionPolicies operations + :vartype long_term_retention_policies: azure.mgmt.sql.operations.LongTermRetentionPoliciesOperations + :ivar private_endpoint_connections: PrivateEndpointConnections operations + :vartype private_endpoint_connections: azure.mgmt.sql.operations.PrivateEndpointConnectionsOperations :param credentials: Credentials needed for the client to connect to Azure. 
:type credentials: :mod:`A msrestazure Credentials @@ -417,10 +420,6 @@ def __init__( self._client, self.config, self._serialize, self._deserialize) self.job_versions = JobVersionsOperations( self._client, self.config, self._serialize, self._deserialize) - self.long_term_retention_backups = LongTermRetentionBackupsOperations( - self._client, self.config, self._serialize, self._deserialize) - self.backup_long_term_retention_policies = BackupLongTermRetentionPoliciesOperations( - self._client, self.config, self._serialize, self._deserialize) self.managed_backup_short_term_retention_policies = ManagedBackupShortTermRetentionPoliciesOperations( self._client, self.config, self._serialize, self._deserialize) self.managed_restorable_dropped_database_backup_short_term_retention_policies = ManagedRestorableDroppedDatabaseBackupShortTermRetentionPoliciesOperations( @@ -479,8 +478,6 @@ def __init__( self._client, self.config, self._serialize, self._deserialize) self.usages = UsagesOperations( self._client, self.config, self._serialize, self._deserialize) - self.private_endpoint_connections = PrivateEndpointConnectionsOperations( - self._client, self.config, self._serialize, self._deserialize) self.private_link_resources = PrivateLinkResourcesOperations( self._client, self.config, self._serialize, self._deserialize) self.servers = ServersOperations( @@ -503,6 +500,8 @@ def __init__( self._client, self.config, self._serialize, self._deserialize) self.sync_members = SyncMembersOperations( self._client, self.config, self._serialize, self._deserialize) + self.import_export = ImportExportOperations( + self._client, self.config, self._serialize, self._deserialize) self.managed_databases = ManagedDatabasesOperations( self._client, self.config, self._serialize, self._deserialize) self.managed_database_restore_details = ManagedDatabaseRestoreDetailsOperations( @@ -517,3 +516,9 @@ def __init__( self._client, self.config, self._serialize, self._deserialize) self.server_dev_ops_audit_settings = ServerDevOpsAuditSettingsOperations( self._client, self.config, self._serialize, self._deserialize) + self.long_term_retention_backups = LongTermRetentionBackupsOperations( + self._client, self.config, self._serialize, self._deserialize) + self.long_term_retention_policies = LongTermRetentionPoliciesOperations( + self._client, self.config, self._serialize, self._deserialize) + self.private_endpoint_connections = PrivateEndpointConnectionsOperations( + self._client, self.config, self._serialize, self._deserialize) diff --git a/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/models/__init__.py b/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/models/__init__.py index 623c226d6496..e93ee0a613cb 100644 --- a/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/models/__init__.py +++ b/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/models/__init__.py @@ -13,11 +13,11 @@ from ._models_py3 import AutomaticTuningOptions from ._models_py3 import AutomaticTuningServerOptions from ._models_py3 import AutoPauseDelayTimeRange - from ._models_py3 import BackupLongTermRetentionPolicy from ._models_py3 import BackupShortTermRetentionPolicy from ._models_py3 import CheckNameAvailabilityRequest from ._models_py3 import CheckNameAvailabilityResponse from ._models_py3 import CompleteDatabaseRestoreDefinition + from ._models_py3 import CopyLongTermRetentionBackupParameters from ._models_py3 import CreateDatabaseRestorePointDefinition from ._models_py3 import Database from ._models_py3 import DatabaseAutomaticTuning @@ -82,6 +82,8 @@ from ._models_py3 import LocationCapabilities from 
._models_py3 import LogSizeCapability from ._models_py3 import LongTermRetentionBackup + from ._models_py3 import LongTermRetentionBackupOperationResult + from ._models_py3 import LongTermRetentionPolicy from ._models_py3 import MaintenanceConfigurationCapability from ._models_py3 import ManagedBackupShortTermRetentionPolicy from ._models_py3 import ManagedDatabase @@ -198,6 +200,7 @@ from ._models_py3 import TransparentDataEncryption from ._models_py3 import TransparentDataEncryptionActivity from ._models_py3 import UnlinkParameters + from ._models_py3 import UpdateLongTermRetentionBackupParameters from ._models_py3 import UpsertManagedServerOperationParameters from ._models_py3 import UpsertManagedServerOperationStep from ._models_py3 import Usage @@ -213,11 +216,11 @@ from ._models import AutomaticTuningOptions from ._models import AutomaticTuningServerOptions from ._models import AutoPauseDelayTimeRange - from ._models import BackupLongTermRetentionPolicy from ._models import BackupShortTermRetentionPolicy from ._models import CheckNameAvailabilityRequest from ._models import CheckNameAvailabilityResponse from ._models import CompleteDatabaseRestoreDefinition + from ._models import CopyLongTermRetentionBackupParameters from ._models import CreateDatabaseRestorePointDefinition from ._models import Database from ._models import DatabaseAutomaticTuning @@ -282,6 +285,8 @@ from ._models import LocationCapabilities from ._models import LogSizeCapability from ._models import LongTermRetentionBackup + from ._models import LongTermRetentionBackupOperationResult + from ._models import LongTermRetentionPolicy from ._models import MaintenanceConfigurationCapability from ._models import ManagedBackupShortTermRetentionPolicy from ._models import ManagedDatabase @@ -398,6 +403,7 @@ from ._models import TransparentDataEncryption from ._models import TransparentDataEncryptionActivity from ._models import UnlinkParameters + from ._models import UpdateLongTermRetentionBackupParameters from ._models import UpsertManagedServerOperationParameters from ._models import UpsertManagedServerOperationStep from ._models import Usage @@ -436,6 +442,7 @@ from ._paged_models import JobTargetGroupPaged from ._paged_models import JobVersionPaged from ._paged_models import LongTermRetentionBackupPaged +from ._paged_models import LongTermRetentionPolicyPaged from ._paged_models import ManagedBackupShortTermRetentionPolicyPaged from ._paged_models import ManagedDatabasePaged from ._paged_models import ManagedDatabaseSecurityAlertPolicyPaged @@ -545,10 +552,10 @@ VulnerabilityAssessmentScanState, InstanceFailoverGroupReplicationRole, InstancePoolLicenseType, + IdentityType, PrivateLinkServiceConnectionStateStatus, PrivateLinkServiceConnectionStateActionsRequire, PrivateEndpointProvisioningState, - IdentityType, ServerPublicNetworkAccess, CheckNameAvailabilityReason, MaxSizeUnit, @@ -561,6 +568,7 @@ SyncGroupState, SyncDirection, SyncMemberState, + StorageKeyType, ManagedDatabaseStatus, CatalogCollationType, ManagedDatabaseCreateMode, @@ -570,26 +578,23 @@ StorageAccountType, QueryTimeGrainType, QueryMetricUnitType, + ElasticPoolState, + ElasticPoolLicenseType, + CreatedByType, CreateMode, SampleName, DatabaseStatus, DatabaseLicenseType, DatabaseReadScale, SecondaryType, - StorageKeyType, - ElasticPoolState, - ElasticPoolLicenseType, - CreatedByType, - LongTermRetentionDatabaseState, + CurrentBackupStorageRedundancy, + RequestedBackupStorageRedundancy, + TargetBackupStorageRedundancy, + BackupStorageRedundancy, 
VulnerabilityAssessmentPolicyBaselineName, SensitivityLabelSource, CapabilityGroup, - DatabaseState1, - DatabaseState2, - DatabaseState3, - DatabaseState4, - DatabaseState5, - DatabaseState6, + DatabaseState, AggregationFunctionType, MetricType, ReplicaType, @@ -599,11 +604,11 @@ 'AutomaticTuningOptions', 'AutomaticTuningServerOptions', 'AutoPauseDelayTimeRange', - 'BackupLongTermRetentionPolicy', 'BackupShortTermRetentionPolicy', 'CheckNameAvailabilityRequest', 'CheckNameAvailabilityResponse', 'CompleteDatabaseRestoreDefinition', + 'CopyLongTermRetentionBackupParameters', 'CreateDatabaseRestorePointDefinition', 'Database', 'DatabaseAutomaticTuning', @@ -668,6 +673,8 @@ 'LocationCapabilities', 'LogSizeCapability', 'LongTermRetentionBackup', + 'LongTermRetentionBackupOperationResult', + 'LongTermRetentionPolicy', 'MaintenanceConfigurationCapability', 'ManagedBackupShortTermRetentionPolicy', 'ManagedDatabase', @@ -784,6 +791,7 @@ 'TransparentDataEncryption', 'TransparentDataEncryptionActivity', 'UnlinkParameters', + 'UpdateLongTermRetentionBackupParameters', 'UpsertManagedServerOperationParameters', 'UpsertManagedServerOperationStep', 'Usage', @@ -836,7 +844,6 @@ 'JobStepPaged', 'JobTargetGroupPaged', 'JobVersionPaged', - 'LongTermRetentionBackupPaged', 'ManagedBackupShortTermRetentionPolicyPaged', 'ServerDnsAliasPaged', 'ServerSecurityAlertPolicyPaged', @@ -858,7 +865,6 @@ 'ServerVulnerabilityAssessmentPaged', 'InstancePoolPaged', 'UsagePaged', - 'PrivateEndpointConnectionPaged', 'PrivateLinkResourcePaged', 'ServerPaged', 'ManagedInstanceLongTermRetentionBackupPaged', @@ -879,6 +885,9 @@ 'ManagedInstanceAzureADOnlyAuthenticationPaged', 'ServerTrustGroupPaged', 'ServerDevOpsAuditingSettingsPaged', + 'LongTermRetentionBackupPaged', + 'LongTermRetentionPolicyPaged', + 'PrivateEndpointConnectionPaged', 'ServerConnectionType', 'SecurityAlertPolicyState', 'SecurityAlertPolicyEmailAccountAdmins', @@ -930,10 +939,10 @@ 'VulnerabilityAssessmentScanState', 'InstanceFailoverGroupReplicationRole', 'InstancePoolLicenseType', + 'IdentityType', 'PrivateLinkServiceConnectionStateStatus', 'PrivateLinkServiceConnectionStateActionsRequire', 'PrivateEndpointProvisioningState', - 'IdentityType', 'ServerPublicNetworkAccess', 'CheckNameAvailabilityReason', 'MaxSizeUnit', @@ -946,6 +955,7 @@ 'SyncGroupState', 'SyncDirection', 'SyncMemberState', + 'StorageKeyType', 'ManagedDatabaseStatus', 'CatalogCollationType', 'ManagedDatabaseCreateMode', @@ -955,26 +965,23 @@ 'StorageAccountType', 'QueryTimeGrainType', 'QueryMetricUnitType', + 'ElasticPoolState', + 'ElasticPoolLicenseType', + 'CreatedByType', 'CreateMode', 'SampleName', 'DatabaseStatus', 'DatabaseLicenseType', 'DatabaseReadScale', 'SecondaryType', - 'StorageKeyType', - 'ElasticPoolState', - 'ElasticPoolLicenseType', - 'CreatedByType', - 'LongTermRetentionDatabaseState', + 'CurrentBackupStorageRedundancy', + 'RequestedBackupStorageRedundancy', + 'TargetBackupStorageRedundancy', + 'BackupStorageRedundancy', 'VulnerabilityAssessmentPolicyBaselineName', 'SensitivityLabelSource', 'CapabilityGroup', - 'DatabaseState1', - 'DatabaseState2', - 'DatabaseState3', - 'DatabaseState4', - 'DatabaseState5', - 'DatabaseState6', + 'DatabaseState', 'AggregationFunctionType', 'MetricType', 'ReplicaType', diff --git a/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/models/_models.py b/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/models/_models.py index e1923c027292..6f1e1c1fdd01 100644 --- a/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/models/_models.py +++ 
b/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/models/_models.py @@ -215,56 +215,6 @@ def __init__(self, **kwargs): super(ProxyResource, self).__init__(**kwargs) -class BackupLongTermRetentionPolicy(ProxyResource): - """A long term retention policy. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar id: Resource ID. - :vartype id: str - :ivar name: Resource name. - :vartype name: str - :ivar type: Resource type. - :vartype type: str - :param weekly_retention: The weekly retention policy for an LTR backup in - an ISO 8601 format. - :type weekly_retention: str - :param monthly_retention: The monthly retention policy for an LTR backup - in an ISO 8601 format. - :type monthly_retention: str - :param yearly_retention: The yearly retention policy for an LTR backup in - an ISO 8601 format. - :type yearly_retention: str - :param week_of_year: The week of year to take the yearly backup in an ISO - 8601 format. - :type week_of_year: int - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'weekly_retention': {'key': 'properties.weeklyRetention', 'type': 'str'}, - 'monthly_retention': {'key': 'properties.monthlyRetention', 'type': 'str'}, - 'yearly_retention': {'key': 'properties.yearlyRetention', 'type': 'str'}, - 'week_of_year': {'key': 'properties.weekOfYear', 'type': 'int'}, - } - - def __init__(self, **kwargs): - super(BackupLongTermRetentionPolicy, self).__init__(**kwargs) - self.weekly_retention = kwargs.get('weekly_retention', None) - self.monthly_retention = kwargs.get('monthly_retention', None) - self.yearly_retention = kwargs.get('yearly_retention', None) - self.week_of_year = kwargs.get('week_of_year', None) - - class BackupShortTermRetentionPolicy(ProxyResource): """A short term retention policy. @@ -403,6 +353,50 @@ def __init__(self, **kwargs): self.last_backup_name = kwargs.get('last_backup_name', None) +class CopyLongTermRetentionBackupParameters(Model): + """Contains the information necessary to perform long term retention backup + copy operation. + + :param target_subscription_id: The subscription that owns the target + server + :type target_subscription_id: str + :param target_resource_group: The resource group that owns the target + server + :type target_resource_group: str + :param target_server_resource_id: The resource Id of the target server + that owns the database + :type target_server_resource_id: str + :param target_server_fully_qualified_domain_name: The fully qualified + domain name of the target server + :type target_server_fully_qualified_domain_name: str + :param target_database_name: The name of the database owns the copied + backup. + :type target_database_name: str + :param target_backup_storage_redundancy: The storage redundancy type of + the copied backup. 
Possible values include: 'Geo', 'Local', 'Zone' + :type target_backup_storage_redundancy: str or + ~azure.mgmt.sql.models.TargetBackupStorageRedundancy + """ + + _attribute_map = { + 'target_subscription_id': {'key': 'properties.targetSubscriptionId', 'type': 'str'}, + 'target_resource_group': {'key': 'properties.targetResourceGroup', 'type': 'str'}, + 'target_server_resource_id': {'key': 'properties.targetServerResourceId', 'type': 'str'}, + 'target_server_fully_qualified_domain_name': {'key': 'properties.targetServerFullyQualifiedDomainName', 'type': 'str'}, + 'target_database_name': {'key': 'properties.targetDatabaseName', 'type': 'str'}, + 'target_backup_storage_redundancy': {'key': 'properties.targetBackupStorageRedundancy', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(CopyLongTermRetentionBackupParameters, self).__init__(**kwargs) + self.target_subscription_id = kwargs.get('target_subscription_id', None) + self.target_resource_group = kwargs.get('target_resource_group', None) + self.target_server_resource_id = kwargs.get('target_server_resource_id', None) + self.target_server_fully_qualified_domain_name = kwargs.get('target_server_fully_qualified_domain_name', None) + self.target_database_name = kwargs.get('target_database_name', None) + self.target_backup_storage_redundancy = kwargs.get('target_backup_storage_redundancy', None) + + class CreateDatabaseRestorePointDefinition(Model): """Contains the information necessary to perform a create database restore point operation. @@ -624,10 +618,16 @@ class Database(TrackedResource): :param auto_pause_delay: Time in minutes after which database is automatically paused. A value of -1 means that automatic pause is disabled :type auto_pause_delay: int - :param storage_account_type: The storage account type used to store - backups for this database. Possible values include: 'GRS', 'LRS', 'ZRS' - :type storage_account_type: str or - ~azure.mgmt.sql.models.StorageAccountType + :ivar current_backup_storage_redundancy: The storage account type used to + store backups for this database. Possible values include: 'Geo', 'Local', + 'Zone' + :vartype current_backup_storage_redundancy: str or + ~azure.mgmt.sql.models.CurrentBackupStorageRedundancy + :param requested_backup_storage_redundancy: The storage account type to be + used to store backups for this database. 
Possible values include: 'Geo', + 'Local', 'Zone' + :type requested_backup_storage_redundancy: str or + ~azure.mgmt.sql.models.RequestedBackupStorageRedundancy :param min_capacity: Minimal capacity that database will always have allocated, if not paused :type min_capacity: float @@ -660,6 +660,7 @@ class Database(TrackedResource): 'max_log_size_bytes': {'readonly': True}, 'earliest_restore_date': {'readonly': True}, 'current_sku': {'readonly': True}, + 'current_backup_storage_redundancy': {'readonly': True}, 'paused_date': {'readonly': True}, 'resumed_date': {'readonly': True}, } @@ -702,7 +703,8 @@ class Database(TrackedResource): 'secondary_type': {'key': 'properties.secondaryType', 'type': 'str'}, 'current_sku': {'key': 'properties.currentSku', 'type': 'Sku'}, 'auto_pause_delay': {'key': 'properties.autoPauseDelay', 'type': 'int'}, - 'storage_account_type': {'key': 'properties.storageAccountType', 'type': 'str'}, + 'current_backup_storage_redundancy': {'key': 'properties.currentBackupStorageRedundancy', 'type': 'str'}, + 'requested_backup_storage_redundancy': {'key': 'properties.requestedBackupStorageRedundancy', 'type': 'str'}, 'min_capacity': {'key': 'properties.minCapacity', 'type': 'float'}, 'paused_date': {'key': 'properties.pausedDate', 'type': 'iso-8601'}, 'resumed_date': {'key': 'properties.resumedDate', 'type': 'iso-8601'}, @@ -743,7 +745,8 @@ def __init__(self, **kwargs): self.secondary_type = kwargs.get('secondary_type', None) self.current_sku = None self.auto_pause_delay = kwargs.get('auto_pause_delay', None) - self.storage_account_type = kwargs.get('storage_account_type', None) + self.current_backup_storage_redundancy = None + self.requested_backup_storage_redundancy = kwargs.get('requested_backup_storage_redundancy', None) self.min_capacity = kwargs.get('min_capacity', None) self.paused_date = None self.resumed_date = None @@ -1287,10 +1290,16 @@ class DatabaseUpdate(Model): :param auto_pause_delay: Time in minutes after which database is automatically paused. A value of -1 means that automatic pause is disabled :type auto_pause_delay: int - :param storage_account_type: The storage account type used to store - backups for this database. Possible values include: 'GRS', 'LRS', 'ZRS' - :type storage_account_type: str or - ~azure.mgmt.sql.models.StorageAccountType + :ivar current_backup_storage_redundancy: The storage account type used to + store backups for this database. Possible values include: 'Geo', 'Local', + 'Zone' + :vartype current_backup_storage_redundancy: str or + ~azure.mgmt.sql.models.CurrentBackupStorageRedundancy + :param requested_backup_storage_redundancy: The storage account type to be + used to store backups for this database. 
Possible values include: 'Geo', + 'Local', 'Zone' + :type requested_backup_storage_redundancy: str or + ~azure.mgmt.sql.models.RequestedBackupStorageRedundancy :param min_capacity: Minimal capacity that database will always have allocated, if not paused :type min_capacity: float @@ -1319,6 +1328,7 @@ class DatabaseUpdate(Model): 'max_log_size_bytes': {'readonly': True}, 'earliest_restore_date': {'readonly': True}, 'current_sku': {'readonly': True}, + 'current_backup_storage_redundancy': {'readonly': True}, 'paused_date': {'readonly': True}, 'resumed_date': {'readonly': True}, } @@ -1354,7 +1364,8 @@ class DatabaseUpdate(Model): 'secondary_type': {'key': 'properties.secondaryType', 'type': 'str'}, 'current_sku': {'key': 'properties.currentSku', 'type': 'Sku'}, 'auto_pause_delay': {'key': 'properties.autoPauseDelay', 'type': 'int'}, - 'storage_account_type': {'key': 'properties.storageAccountType', 'type': 'str'}, + 'current_backup_storage_redundancy': {'key': 'properties.currentBackupStorageRedundancy', 'type': 'str'}, + 'requested_backup_storage_redundancy': {'key': 'properties.requestedBackupStorageRedundancy', 'type': 'str'}, 'min_capacity': {'key': 'properties.minCapacity', 'type': 'float'}, 'paused_date': {'key': 'properties.pausedDate', 'type': 'iso-8601'}, 'resumed_date': {'key': 'properties.resumedDate', 'type': 'iso-8601'}, @@ -1394,7 +1405,8 @@ def __init__(self, **kwargs): self.secondary_type = kwargs.get('secondary_type', None) self.current_sku = None self.auto_pause_delay = kwargs.get('auto_pause_delay', None) - self.storage_account_type = kwargs.get('storage_account_type', None) + self.current_backup_storage_redundancy = None + self.requested_backup_storage_redundancy = kwargs.get('requested_backup_storage_redundancy', None) self.min_capacity = kwargs.get('min_capacity', None) self.paused_date = None self.resumed_date = None @@ -4621,6 +4633,14 @@ class LongTermRetentionBackup(ProxyResource): :ivar backup_expiration_time: The time the long term retention backup will expire. :vartype backup_expiration_time: datetime + :ivar backup_storage_redundancy: The storage redundancy type of the + backup. Possible values include: 'Geo', 'Local', 'Zone' + :vartype backup_storage_redundancy: str or + ~azure.mgmt.sql.models.BackupStorageRedundancy + :param requested_backup_storage_redundancy: The storage redundancy type of + the backup. 
Possible values include: 'Geo', 'Local', 'Zone' + :type requested_backup_storage_redundancy: str or + ~azure.mgmt.sql.models.BackupStorageRedundancy """ _validation = { @@ -4633,6 +4653,7 @@ class LongTermRetentionBackup(ProxyResource): 'database_deletion_time': {'readonly': True}, 'backup_time': {'readonly': True}, 'backup_expiration_time': {'readonly': True}, + 'backup_storage_redundancy': {'readonly': True}, } _attribute_map = { @@ -4645,6 +4666,8 @@ class LongTermRetentionBackup(ProxyResource): 'database_deletion_time': {'key': 'properties.databaseDeletionTime', 'type': 'iso-8601'}, 'backup_time': {'key': 'properties.backupTime', 'type': 'iso-8601'}, 'backup_expiration_time': {'key': 'properties.backupExpirationTime', 'type': 'iso-8601'}, + 'backup_storage_redundancy': {'key': 'properties.backupStorageRedundancy', 'type': 'str'}, + 'requested_backup_storage_redundancy': {'key': 'properties.requestedBackupStorageRedundancy', 'type': 'str'}, } def __init__(self, **kwargs): @@ -4655,6 +4678,125 @@ def __init__(self, **kwargs): self.database_deletion_time = None self.backup_time = None self.backup_expiration_time = None + self.backup_storage_redundancy = None + self.requested_backup_storage_redundancy = kwargs.get('requested_backup_storage_redundancy', None) + + +class LongTermRetentionBackupOperationResult(ProxyResource): + """A LongTermRetentionBackup operation result resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Resource ID. + :vartype id: str + :ivar name: Resource name. + :vartype name: str + :ivar type: Resource type. + :vartype type: str + :ivar request_id: Request Id. + :vartype request_id: str + :ivar operation_type: Operation type. + :vartype operation_type: str + :ivar from_backup_resource_id: Source backup resource id + :vartype from_backup_resource_id: str + :ivar to_backup_resource_id: Target backup resource id + :vartype to_backup_resource_id: str + :ivar target_backup_storage_redundancy: The storage redundancy type of the + copied backup. 
Possible values include: 'Geo', 'Local', 'Zone' + :vartype target_backup_storage_redundancy: str or + ~azure.mgmt.sql.models.BackupStorageRedundancy + :ivar status: Operation status + :vartype status: str + :ivar message: Progress message + :vartype message: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'request_id': {'readonly': True}, + 'operation_type': {'readonly': True}, + 'from_backup_resource_id': {'readonly': True}, + 'to_backup_resource_id': {'readonly': True}, + 'target_backup_storage_redundancy': {'readonly': True}, + 'status': {'readonly': True}, + 'message': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'request_id': {'key': 'properties.requestId', 'type': 'str'}, + 'operation_type': {'key': 'properties.operationType', 'type': 'str'}, + 'from_backup_resource_id': {'key': 'properties.fromBackupResourceId', 'type': 'str'}, + 'to_backup_resource_id': {'key': 'properties.toBackupResourceId', 'type': 'str'}, + 'target_backup_storage_redundancy': {'key': 'properties.targetBackupStorageRedundancy', 'type': 'str'}, + 'status': {'key': 'properties.status', 'type': 'str'}, + 'message': {'key': 'properties.message', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(LongTermRetentionBackupOperationResult, self).__init__(**kwargs) + self.request_id = None + self.operation_type = None + self.from_backup_resource_id = None + self.to_backup_resource_id = None + self.target_backup_storage_redundancy = None + self.status = None + self.message = None + + +class LongTermRetentionPolicy(ProxyResource): + """A long term retention policy. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Resource ID. + :vartype id: str + :ivar name: Resource name. + :vartype name: str + :ivar type: Resource type. + :vartype type: str + :param weekly_retention: The weekly retention policy for an LTR backup in + an ISO 8601 format. + :type weekly_retention: str + :param monthly_retention: The monthly retention policy for an LTR backup + in an ISO 8601 format. + :type monthly_retention: str + :param yearly_retention: The yearly retention policy for an LTR backup in + an ISO 8601 format. + :type yearly_retention: str + :param week_of_year: The week of year to take the yearly backup in an ISO + 8601 format. 
+ :type week_of_year: int + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'weekly_retention': {'key': 'properties.weeklyRetention', 'type': 'str'}, + 'monthly_retention': {'key': 'properties.monthlyRetention', 'type': 'str'}, + 'yearly_retention': {'key': 'properties.yearlyRetention', 'type': 'str'}, + 'week_of_year': {'key': 'properties.weekOfYear', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(LongTermRetentionPolicy, self).__init__(**kwargs) + self.weekly_retention = kwargs.get('weekly_retention', None) + self.monthly_retention = kwargs.get('monthly_retention', None) + self.yearly_retention = kwargs.get('yearly_retention', None) + self.week_of_year = kwargs.get('week_of_year', None) class MaintenanceConfigurationCapability(Model): @@ -10395,38 +10537,22 @@ def __init__(self, **kwargs): class SystemData(Model): """Metadata pertaining to creation and last modification of the resource. - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar created_by: A string identifier for the identity that created the - resource. - :vartype created_by: str - :ivar created_by_type: The type of identity that created the resource: - . Possible values include: 'User', - 'Application', 'ManagedIdentity', 'Key' - :vartype created_by_type: str or ~azure.mgmt.sql.models.CreatedByType - :ivar created_at: The timestamp of resource creation (UTC). - :vartype created_at: datetime - :ivar last_modified_by: A string identifier for the identity that last - modified the resource. - :vartype last_modified_by: str - :ivar last_modified_by_type: The type of identity that last modified the - resource: . Possible values include: - 'User', 'Application', 'ManagedIdentity', 'Key' - :vartype last_modified_by_type: str or - ~azure.mgmt.sql.models.CreatedByType - :ivar last_modified_at: The timestamp of last modification (UTC). - :vartype last_modified_at: datetime - """ - - _validation = { - 'created_by': {'readonly': True}, - 'created_by_type': {'readonly': True}, - 'created_at': {'readonly': True}, - 'last_modified_by': {'readonly': True}, - 'last_modified_by_type': {'readonly': True}, - 'last_modified_at': {'readonly': True}, - } + :param created_by: The identity that created the resource. + :type created_by: str + :param created_by_type: The type of identity that created the resource. + Possible values include: 'User', 'Application', 'ManagedIdentity', 'Key' + :type created_by_type: str or ~azure.mgmt.sql.models.CreatedByType + :param created_at: The timestamp of resource creation (UTC). + :type created_at: datetime + :param last_modified_by: The identity that last modified the resource. + :type last_modified_by: str + :param last_modified_by_type: The type of identity that last modified the + resource. 
Possible values include: 'User', 'Application', + 'ManagedIdentity', 'Key' + :type last_modified_by_type: str or ~azure.mgmt.sql.models.CreatedByType + :param last_modified_at: The timestamp of resource last modification (UTC) + :type last_modified_at: datetime + """ _attribute_map = { 'created_by': {'key': 'createdBy', 'type': 'str'}, @@ -10439,12 +10565,12 @@ class SystemData(Model): def __init__(self, **kwargs): super(SystemData, self).__init__(**kwargs) - self.created_by = None - self.created_by_type = None - self.created_at = None - self.last_modified_by = None - self.last_modified_by_type = None - self.last_modified_at = None + self.created_by = kwargs.get('created_by', None) + self.created_by_type = kwargs.get('created_by_type', None) + self.created_at = kwargs.get('created_at', None) + self.last_modified_by = kwargs.get('last_modified_by', None) + self.last_modified_by_type = kwargs.get('last_modified_by_type', None) + self.last_modified_at = kwargs.get('last_modified_at', None) class TdeCertificate(ProxyResource): @@ -10650,6 +10776,25 @@ def __init__(self, **kwargs): self.forced_termination = kwargs.get('forced_termination', None) +class UpdateLongTermRetentionBackupParameters(Model): + """Contains the information necessary to perform long term retention backup + update operation. + + :param requested_backup_storage_redundancy: The storage redundancy type of + the copied backup. Possible values include: 'Geo', 'Local', 'Zone' + :type requested_backup_storage_redundancy: str or + ~azure.mgmt.sql.models.RequestedBackupStorageRedundancy + """ + + _attribute_map = { + 'requested_backup_storage_redundancy': {'key': 'properties.requestedBackupStorageRedundancy', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(UpdateLongTermRetentionBackupParameters, self).__init__(**kwargs) + self.requested_backup_storage_redundancy = kwargs.get('requested_backup_storage_redundancy', None) + + class UpsertManagedServerOperationParameters(Model): """UpsertManagedServerOperationParameters. diff --git a/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/models/_models_py3.py b/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/models/_models_py3.py index 4346f7f65c48..4bbd9d139787 100644 --- a/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/models/_models_py3.py +++ b/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/models/_models_py3.py @@ -215,56 +215,6 @@ def __init__(self, **kwargs) -> None: super(ProxyResource, self).__init__(**kwargs) -class BackupLongTermRetentionPolicy(ProxyResource): - """A long term retention policy. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar id: Resource ID. - :vartype id: str - :ivar name: Resource name. - :vartype name: str - :ivar type: Resource type. - :vartype type: str - :param weekly_retention: The weekly retention policy for an LTR backup in - an ISO 8601 format. - :type weekly_retention: str - :param monthly_retention: The monthly retention policy for an LTR backup - in an ISO 8601 format. - :type monthly_retention: str - :param yearly_retention: The yearly retention policy for an LTR backup in - an ISO 8601 format. - :type yearly_retention: str - :param week_of_year: The week of year to take the yearly backup in an ISO - 8601 format. 
- :type week_of_year: int - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'weekly_retention': {'key': 'properties.weeklyRetention', 'type': 'str'}, - 'monthly_retention': {'key': 'properties.monthlyRetention', 'type': 'str'}, - 'yearly_retention': {'key': 'properties.yearlyRetention', 'type': 'str'}, - 'week_of_year': {'key': 'properties.weekOfYear', 'type': 'int'}, - } - - def __init__(self, *, weekly_retention: str=None, monthly_retention: str=None, yearly_retention: str=None, week_of_year: int=None, **kwargs) -> None: - super(BackupLongTermRetentionPolicy, self).__init__(**kwargs) - self.weekly_retention = weekly_retention - self.monthly_retention = monthly_retention - self.yearly_retention = yearly_retention - self.week_of_year = week_of_year - - class BackupShortTermRetentionPolicy(ProxyResource): """A short term retention policy. @@ -403,6 +353,50 @@ def __init__(self, *, last_backup_name: str, **kwargs) -> None: self.last_backup_name = last_backup_name +class CopyLongTermRetentionBackupParameters(Model): + """Contains the information necessary to perform long term retention backup + copy operation. + + :param target_subscription_id: The subscription that owns the target + server + :type target_subscription_id: str + :param target_resource_group: The resource group that owns the target + server + :type target_resource_group: str + :param target_server_resource_id: The resource Id of the target server + that owns the database + :type target_server_resource_id: str + :param target_server_fully_qualified_domain_name: The fully qualified + domain name of the target server + :type target_server_fully_qualified_domain_name: str + :param target_database_name: The name of the database owns the copied + backup. + :type target_database_name: str + :param target_backup_storage_redundancy: The storage redundancy type of + the copied backup. 
Possible values include: 'Geo', 'Local', 'Zone' + :type target_backup_storage_redundancy: str or + ~azure.mgmt.sql.models.TargetBackupStorageRedundancy + """ + + _attribute_map = { + 'target_subscription_id': {'key': 'properties.targetSubscriptionId', 'type': 'str'}, + 'target_resource_group': {'key': 'properties.targetResourceGroup', 'type': 'str'}, + 'target_server_resource_id': {'key': 'properties.targetServerResourceId', 'type': 'str'}, + 'target_server_fully_qualified_domain_name': {'key': 'properties.targetServerFullyQualifiedDomainName', 'type': 'str'}, + 'target_database_name': {'key': 'properties.targetDatabaseName', 'type': 'str'}, + 'target_backup_storage_redundancy': {'key': 'properties.targetBackupStorageRedundancy', 'type': 'str'}, + } + + def __init__(self, *, target_subscription_id: str=None, target_resource_group: str=None, target_server_resource_id: str=None, target_server_fully_qualified_domain_name: str=None, target_database_name: str=None, target_backup_storage_redundancy=None, **kwargs) -> None: + super(CopyLongTermRetentionBackupParameters, self).__init__(**kwargs) + self.target_subscription_id = target_subscription_id + self.target_resource_group = target_resource_group + self.target_server_resource_id = target_server_resource_id + self.target_server_fully_qualified_domain_name = target_server_fully_qualified_domain_name + self.target_database_name = target_database_name + self.target_backup_storage_redundancy = target_backup_storage_redundancy + + class CreateDatabaseRestorePointDefinition(Model): """Contains the information necessary to perform a create database restore point operation. @@ -624,10 +618,16 @@ class Database(TrackedResource): :param auto_pause_delay: Time in minutes after which database is automatically paused. A value of -1 means that automatic pause is disabled :type auto_pause_delay: int - :param storage_account_type: The storage account type used to store - backups for this database. Possible values include: 'GRS', 'LRS', 'ZRS' - :type storage_account_type: str or - ~azure.mgmt.sql.models.StorageAccountType + :ivar current_backup_storage_redundancy: The storage account type used to + store backups for this database. Possible values include: 'Geo', 'Local', + 'Zone' + :vartype current_backup_storage_redundancy: str or + ~azure.mgmt.sql.models.CurrentBackupStorageRedundancy + :param requested_backup_storage_redundancy: The storage account type to be + used to store backups for this database. 
Possible values include: 'Geo', + 'Local', 'Zone' + :type requested_backup_storage_redundancy: str or + ~azure.mgmt.sql.models.RequestedBackupStorageRedundancy :param min_capacity: Minimal capacity that database will always have allocated, if not paused :type min_capacity: float @@ -660,6 +660,7 @@ class Database(TrackedResource): 'max_log_size_bytes': {'readonly': True}, 'earliest_restore_date': {'readonly': True}, 'current_sku': {'readonly': True}, + 'current_backup_storage_redundancy': {'readonly': True}, 'paused_date': {'readonly': True}, 'resumed_date': {'readonly': True}, } @@ -702,14 +703,15 @@ class Database(TrackedResource): 'secondary_type': {'key': 'properties.secondaryType', 'type': 'str'}, 'current_sku': {'key': 'properties.currentSku', 'type': 'Sku'}, 'auto_pause_delay': {'key': 'properties.autoPauseDelay', 'type': 'int'}, - 'storage_account_type': {'key': 'properties.storageAccountType', 'type': 'str'}, + 'current_backup_storage_redundancy': {'key': 'properties.currentBackupStorageRedundancy', 'type': 'str'}, + 'requested_backup_storage_redundancy': {'key': 'properties.requestedBackupStorageRedundancy', 'type': 'str'}, 'min_capacity': {'key': 'properties.minCapacity', 'type': 'float'}, 'paused_date': {'key': 'properties.pausedDate', 'type': 'iso-8601'}, 'resumed_date': {'key': 'properties.resumedDate', 'type': 'iso-8601'}, 'maintenance_configuration_id': {'key': 'properties.maintenanceConfigurationId', 'type': 'str'}, } - def __init__(self, *, location: str, tags=None, sku=None, create_mode=None, collation: str=None, max_size_bytes: int=None, sample_name=None, elastic_pool_id: str=None, source_database_id: str=None, restore_point_in_time=None, source_database_deletion_date=None, recovery_services_recovery_point_id: str=None, long_term_retention_backup_resource_id: str=None, recoverable_database_id: str=None, restorable_dropped_database_id: str=None, catalog_collation=None, zone_redundant: bool=None, license_type=None, read_scale=None, high_availability_replica_count: int=None, secondary_type=None, auto_pause_delay: int=None, storage_account_type=None, min_capacity: float=None, maintenance_configuration_id: str=None, **kwargs) -> None: + def __init__(self, *, location: str, tags=None, sku=None, create_mode=None, collation: str=None, max_size_bytes: int=None, sample_name=None, elastic_pool_id: str=None, source_database_id: str=None, restore_point_in_time=None, source_database_deletion_date=None, recovery_services_recovery_point_id: str=None, long_term_retention_backup_resource_id: str=None, recoverable_database_id: str=None, restorable_dropped_database_id: str=None, catalog_collation=None, zone_redundant: bool=None, license_type=None, read_scale=None, high_availability_replica_count: int=None, secondary_type=None, auto_pause_delay: int=None, requested_backup_storage_redundancy=None, min_capacity: float=None, maintenance_configuration_id: str=None, **kwargs) -> None: super(Database, self).__init__(location=location, tags=tags, **kwargs) self.sku = sku self.kind = None @@ -743,7 +745,8 @@ def __init__(self, *, location: str, tags=None, sku=None, create_mode=None, coll self.secondary_type = secondary_type self.current_sku = None self.auto_pause_delay = auto_pause_delay - self.storage_account_type = storage_account_type + self.current_backup_storage_redundancy = None + self.requested_backup_storage_redundancy = requested_backup_storage_redundancy self.min_capacity = min_capacity self.paused_date = None self.resumed_date = None @@ -1287,10 +1290,16 @@ class DatabaseUpdate(Model): 
:param auto_pause_delay: Time in minutes after which database is automatically paused. A value of -1 means that automatic pause is disabled :type auto_pause_delay: int - :param storage_account_type: The storage account type used to store - backups for this database. Possible values include: 'GRS', 'LRS', 'ZRS' - :type storage_account_type: str or - ~azure.mgmt.sql.models.StorageAccountType + :ivar current_backup_storage_redundancy: The storage account type used to + store backups for this database. Possible values include: 'Geo', 'Local', + 'Zone' + :vartype current_backup_storage_redundancy: str or + ~azure.mgmt.sql.models.CurrentBackupStorageRedundancy + :param requested_backup_storage_redundancy: The storage account type to be + used to store backups for this database. Possible values include: 'Geo', + 'Local', 'Zone' + :type requested_backup_storage_redundancy: str or + ~azure.mgmt.sql.models.RequestedBackupStorageRedundancy :param min_capacity: Minimal capacity that database will always have allocated, if not paused :type min_capacity: float @@ -1319,6 +1328,7 @@ class DatabaseUpdate(Model): 'max_log_size_bytes': {'readonly': True}, 'earliest_restore_date': {'readonly': True}, 'current_sku': {'readonly': True}, + 'current_backup_storage_redundancy': {'readonly': True}, 'paused_date': {'readonly': True}, 'resumed_date': {'readonly': True}, } @@ -1354,7 +1364,8 @@ class DatabaseUpdate(Model): 'secondary_type': {'key': 'properties.secondaryType', 'type': 'str'}, 'current_sku': {'key': 'properties.currentSku', 'type': 'Sku'}, 'auto_pause_delay': {'key': 'properties.autoPauseDelay', 'type': 'int'}, - 'storage_account_type': {'key': 'properties.storageAccountType', 'type': 'str'}, + 'current_backup_storage_redundancy': {'key': 'properties.currentBackupStorageRedundancy', 'type': 'str'}, + 'requested_backup_storage_redundancy': {'key': 'properties.requestedBackupStorageRedundancy', 'type': 'str'}, 'min_capacity': {'key': 'properties.minCapacity', 'type': 'float'}, 'paused_date': {'key': 'properties.pausedDate', 'type': 'iso-8601'}, 'resumed_date': {'key': 'properties.resumedDate', 'type': 'iso-8601'}, @@ -1362,7 +1373,7 @@ class DatabaseUpdate(Model): 'tags': {'key': 'tags', 'type': '{str}'}, } - def __init__(self, *, sku=None, create_mode=None, collation: str=None, max_size_bytes: int=None, sample_name=None, elastic_pool_id: str=None, source_database_id: str=None, restore_point_in_time=None, source_database_deletion_date=None, recovery_services_recovery_point_id: str=None, long_term_retention_backup_resource_id: str=None, recoverable_database_id: str=None, restorable_dropped_database_id: str=None, catalog_collation=None, zone_redundant: bool=None, license_type=None, read_scale=None, high_availability_replica_count: int=None, secondary_type=None, auto_pause_delay: int=None, storage_account_type=None, min_capacity: float=None, maintenance_configuration_id: str=None, tags=None, **kwargs) -> None: + def __init__(self, *, sku=None, create_mode=None, collation: str=None, max_size_bytes: int=None, sample_name=None, elastic_pool_id: str=None, source_database_id: str=None, restore_point_in_time=None, source_database_deletion_date=None, recovery_services_recovery_point_id: str=None, long_term_retention_backup_resource_id: str=None, recoverable_database_id: str=None, restorable_dropped_database_id: str=None, catalog_collation=None, zone_redundant: bool=None, license_type=None, read_scale=None, high_availability_replica_count: int=None, secondary_type=None, auto_pause_delay: int=None, 
requested_backup_storage_redundancy=None, min_capacity: float=None, maintenance_configuration_id: str=None, tags=None, **kwargs) -> None: super(DatabaseUpdate, self).__init__(**kwargs) self.sku = sku self.create_mode = create_mode @@ -1394,7 +1405,8 @@ def __init__(self, *, sku=None, create_mode=None, collation: str=None, max_size_ self.secondary_type = secondary_type self.current_sku = None self.auto_pause_delay = auto_pause_delay - self.storage_account_type = storage_account_type + self.current_backup_storage_redundancy = None + self.requested_backup_storage_redundancy = requested_backup_storage_redundancy self.min_capacity = min_capacity self.paused_date = None self.resumed_date = None @@ -4621,6 +4633,14 @@ class LongTermRetentionBackup(ProxyResource): :ivar backup_expiration_time: The time the long term retention backup will expire. :vartype backup_expiration_time: datetime + :ivar backup_storage_redundancy: The storage redundancy type of the + backup. Possible values include: 'Geo', 'Local', 'Zone' + :vartype backup_storage_redundancy: str or + ~azure.mgmt.sql.models.BackupStorageRedundancy + :param requested_backup_storage_redundancy: The storage redundancy type of + the backup. Possible values include: 'Geo', 'Local', 'Zone' + :type requested_backup_storage_redundancy: str or + ~azure.mgmt.sql.models.BackupStorageRedundancy """ _validation = { @@ -4633,6 +4653,7 @@ class LongTermRetentionBackup(ProxyResource): 'database_deletion_time': {'readonly': True}, 'backup_time': {'readonly': True}, 'backup_expiration_time': {'readonly': True}, + 'backup_storage_redundancy': {'readonly': True}, } _attribute_map = { @@ -4645,9 +4666,11 @@ class LongTermRetentionBackup(ProxyResource): 'database_deletion_time': {'key': 'properties.databaseDeletionTime', 'type': 'iso-8601'}, 'backup_time': {'key': 'properties.backupTime', 'type': 'iso-8601'}, 'backup_expiration_time': {'key': 'properties.backupExpirationTime', 'type': 'iso-8601'}, + 'backup_storage_redundancy': {'key': 'properties.backupStorageRedundancy', 'type': 'str'}, + 'requested_backup_storage_redundancy': {'key': 'properties.requestedBackupStorageRedundancy', 'type': 'str'}, } - def __init__(self, **kwargs) -> None: + def __init__(self, *, requested_backup_storage_redundancy=None, **kwargs) -> None: super(LongTermRetentionBackup, self).__init__(**kwargs) self.server_name = None self.server_create_time = None @@ -4655,6 +4678,125 @@ def __init__(self, **kwargs) -> None: self.database_deletion_time = None self.backup_time = None self.backup_expiration_time = None + self.backup_storage_redundancy = None + self.requested_backup_storage_redundancy = requested_backup_storage_redundancy + + +class LongTermRetentionBackupOperationResult(ProxyResource): + """A LongTermRetentionBackup operation result resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Resource ID. + :vartype id: str + :ivar name: Resource name. + :vartype name: str + :ivar type: Resource type. + :vartype type: str + :ivar request_id: Request Id. + :vartype request_id: str + :ivar operation_type: Operation type. + :vartype operation_type: str + :ivar from_backup_resource_id: Source backup resource id + :vartype from_backup_resource_id: str + :ivar to_backup_resource_id: Target backup resource id + :vartype to_backup_resource_id: str + :ivar target_backup_storage_redundancy: The storage redundancy type of the + copied backup. 
Possible values include: 'Geo', 'Local', 'Zone' + :vartype target_backup_storage_redundancy: str or + ~azure.mgmt.sql.models.BackupStorageRedundancy + :ivar status: Operation status + :vartype status: str + :ivar message: Progress message + :vartype message: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'request_id': {'readonly': True}, + 'operation_type': {'readonly': True}, + 'from_backup_resource_id': {'readonly': True}, + 'to_backup_resource_id': {'readonly': True}, + 'target_backup_storage_redundancy': {'readonly': True}, + 'status': {'readonly': True}, + 'message': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'request_id': {'key': 'properties.requestId', 'type': 'str'}, + 'operation_type': {'key': 'properties.operationType', 'type': 'str'}, + 'from_backup_resource_id': {'key': 'properties.fromBackupResourceId', 'type': 'str'}, + 'to_backup_resource_id': {'key': 'properties.toBackupResourceId', 'type': 'str'}, + 'target_backup_storage_redundancy': {'key': 'properties.targetBackupStorageRedundancy', 'type': 'str'}, + 'status': {'key': 'properties.status', 'type': 'str'}, + 'message': {'key': 'properties.message', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(LongTermRetentionBackupOperationResult, self).__init__(**kwargs) + self.request_id = None + self.operation_type = None + self.from_backup_resource_id = None + self.to_backup_resource_id = None + self.target_backup_storage_redundancy = None + self.status = None + self.message = None + + +class LongTermRetentionPolicy(ProxyResource): + """A long term retention policy. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Resource ID. + :vartype id: str + :ivar name: Resource name. + :vartype name: str + :ivar type: Resource type. + :vartype type: str + :param weekly_retention: The weekly retention policy for an LTR backup in + an ISO 8601 format. + :type weekly_retention: str + :param monthly_retention: The monthly retention policy for an LTR backup + in an ISO 8601 format. + :type monthly_retention: str + :param yearly_retention: The yearly retention policy for an LTR backup in + an ISO 8601 format. + :type yearly_retention: str + :param week_of_year: The week of year to take the yearly backup in an ISO + 8601 format. 
+ :type week_of_year: int + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'weekly_retention': {'key': 'properties.weeklyRetention', 'type': 'str'}, + 'monthly_retention': {'key': 'properties.monthlyRetention', 'type': 'str'}, + 'yearly_retention': {'key': 'properties.yearlyRetention', 'type': 'str'}, + 'week_of_year': {'key': 'properties.weekOfYear', 'type': 'int'}, + } + + def __init__(self, *, weekly_retention: str=None, monthly_retention: str=None, yearly_retention: str=None, week_of_year: int=None, **kwargs) -> None: + super(LongTermRetentionPolicy, self).__init__(**kwargs) + self.weekly_retention = weekly_retention + self.monthly_retention = monthly_retention + self.yearly_retention = yearly_retention + self.week_of_year = week_of_year class MaintenanceConfigurationCapability(Model): @@ -10395,38 +10537,22 @@ def __init__(self, *, database_type=None, sync_agent_id: str=None, sql_server_da class SystemData(Model): """Metadata pertaining to creation and last modification of the resource. - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar created_by: A string identifier for the identity that created the - resource. - :vartype created_by: str - :ivar created_by_type: The type of identity that created the resource: - . Possible values include: 'User', - 'Application', 'ManagedIdentity', 'Key' - :vartype created_by_type: str or ~azure.mgmt.sql.models.CreatedByType - :ivar created_at: The timestamp of resource creation (UTC). - :vartype created_at: datetime - :ivar last_modified_by: A string identifier for the identity that last - modified the resource. - :vartype last_modified_by: str - :ivar last_modified_by_type: The type of identity that last modified the - resource: . Possible values include: - 'User', 'Application', 'ManagedIdentity', 'Key' - :vartype last_modified_by_type: str or - ~azure.mgmt.sql.models.CreatedByType - :ivar last_modified_at: The timestamp of last modification (UTC). - :vartype last_modified_at: datetime - """ - - _validation = { - 'created_by': {'readonly': True}, - 'created_by_type': {'readonly': True}, - 'created_at': {'readonly': True}, - 'last_modified_by': {'readonly': True}, - 'last_modified_by_type': {'readonly': True}, - 'last_modified_at': {'readonly': True}, - } + :param created_by: The identity that created the resource. + :type created_by: str + :param created_by_type: The type of identity that created the resource. + Possible values include: 'User', 'Application', 'ManagedIdentity', 'Key' + :type created_by_type: str or ~azure.mgmt.sql.models.CreatedByType + :param created_at: The timestamp of resource creation (UTC). + :type created_at: datetime + :param last_modified_by: The identity that last modified the resource. + :type last_modified_by: str + :param last_modified_by_type: The type of identity that last modified the + resource. 
Possible values include: 'User', 'Application', + 'ManagedIdentity', 'Key' + :type last_modified_by_type: str or ~azure.mgmt.sql.models.CreatedByType + :param last_modified_at: The timestamp of resource last modification (UTC) + :type last_modified_at: datetime + """ _attribute_map = { 'created_by': {'key': 'createdBy', 'type': 'str'}, @@ -10437,14 +10563,14 @@ class SystemData(Model): 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'}, } - def __init__(self, **kwargs) -> None: + def __init__(self, *, created_by: str=None, created_by_type=None, created_at=None, last_modified_by: str=None, last_modified_by_type=None, last_modified_at=None, **kwargs) -> None: super(SystemData, self).__init__(**kwargs) - self.created_by = None - self.created_by_type = None - self.created_at = None - self.last_modified_by = None - self.last_modified_by_type = None - self.last_modified_at = None + self.created_by = created_by + self.created_by_type = created_by_type + self.created_at = created_at + self.last_modified_by = last_modified_by + self.last_modified_by_type = last_modified_by_type + self.last_modified_at = last_modified_at class TdeCertificate(ProxyResource): @@ -10650,6 +10776,25 @@ def __init__(self, *, forced_termination: bool=None, **kwargs) -> None: self.forced_termination = forced_termination +class UpdateLongTermRetentionBackupParameters(Model): + """Contains the information necessary to perform long term retention backup + update operation. + + :param requested_backup_storage_redundancy: The storage redundancy type of + the copied backup. Possible values include: 'Geo', 'Local', 'Zone' + :type requested_backup_storage_redundancy: str or + ~azure.mgmt.sql.models.RequestedBackupStorageRedundancy + """ + + _attribute_map = { + 'requested_backup_storage_redundancy': {'key': 'properties.requestedBackupStorageRedundancy', 'type': 'str'}, + } + + def __init__(self, *, requested_backup_storage_redundancy=None, **kwargs) -> None: + super(UpdateLongTermRetentionBackupParameters, self).__init__(**kwargs) + self.requested_backup_storage_redundancy = requested_backup_storage_redundancy + + class UpsertManagedServerOperationParameters(Model): """UpsertManagedServerOperationParameters. 
diff --git a/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/models/_paged_models.py b/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/models/_paged_models.py index a6ba1f7d34b2..08be7479eda1 100644 --- a/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/models/_paged_models.py +++ b/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/models/_paged_models.py @@ -545,19 +545,6 @@ class JobVersionPaged(Paged): def __init__(self, *args, **kwargs): super(JobVersionPaged, self).__init__(*args, **kwargs) -class LongTermRetentionBackupPaged(Paged): - """ - A paging container for iterating over a list of :class:`LongTermRetentionBackup ` object - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'current_page': {'key': 'value', 'type': '[LongTermRetentionBackup]'} - } - - def __init__(self, *args, **kwargs): - - super(LongTermRetentionBackupPaged, self).__init__(*args, **kwargs) class ManagedBackupShortTermRetentionPolicyPaged(Paged): """ A paging container for iterating over a list of :class:`ManagedBackupShortTermRetentionPolicy ` object @@ -831,19 +818,6 @@ class UsagePaged(Paged): def __init__(self, *args, **kwargs): super(UsagePaged, self).__init__(*args, **kwargs) -class PrivateEndpointConnectionPaged(Paged): - """ - A paging container for iterating over a list of :class:`PrivateEndpointConnection ` object - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'current_page': {'key': 'value', 'type': '[PrivateEndpointConnection]'} - } - - def __init__(self, *args, **kwargs): - - super(PrivateEndpointConnectionPaged, self).__init__(*args, **kwargs) class PrivateLinkResourcePaged(Paged): """ A paging container for iterating over a list of :class:`PrivateLinkResource ` object @@ -1104,3 +1078,42 @@ class ServerDevOpsAuditingSettingsPaged(Paged): def __init__(self, *args, **kwargs): super(ServerDevOpsAuditingSettingsPaged, self).__init__(*args, **kwargs) +class LongTermRetentionBackupPaged(Paged): + """ + A paging container for iterating over a list of :class:`LongTermRetentionBackup ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[LongTermRetentionBackup]'} + } + + def __init__(self, *args, **kwargs): + + super(LongTermRetentionBackupPaged, self).__init__(*args, **kwargs) +class LongTermRetentionPolicyPaged(Paged): + """ + A paging container for iterating over a list of :class:`LongTermRetentionPolicy ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[LongTermRetentionPolicy]'} + } + + def __init__(self, *args, **kwargs): + + super(LongTermRetentionPolicyPaged, self).__init__(*args, **kwargs) +class PrivateEndpointConnectionPaged(Paged): + """ + A paging container for iterating over a list of :class:`PrivateEndpointConnection ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[PrivateEndpointConnection]'} + } + + def __init__(self, *args, **kwargs): + + super(PrivateEndpointConnectionPaged, self).__init__(*args, **kwargs) diff --git a/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/models/_sql_management_client_enums.py b/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/models/_sql_management_client_enums.py index 8a2099ecd7d5..e4aac6ca14dd 100644 --- a/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/models/_sql_management_client_enums.py +++ b/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/models/_sql_management_client_enums.py @@ -456,6 +456,13 @@ class 
InstancePoolLicenseType(str, Enum): base_price = "BasePrice" +class IdentityType(str, Enum): + + none = "None" + system_assigned = "SystemAssigned" + user_assigned = "UserAssigned" + + class PrivateLinkServiceConnectionStateStatus(str, Enum): approved = "Approved" @@ -478,13 +485,6 @@ class PrivateEndpointProvisioningState(str, Enum): rejecting = "Rejecting" -class IdentityType(str, Enum): - - none = "None" - system_assigned = "SystemAssigned" - user_assigned = "UserAssigned" - - class ServerPublicNetworkAccess(str, Enum): enabled = "Enabled" @@ -585,6 +585,12 @@ class SyncMemberState(str, Enum): un_reprovisioned = "UnReprovisioned" +class StorageKeyType(str, Enum): + + shared_access_key = "SharedAccessKey" + storage_access_key = "StorageAccessKey" + + class ManagedDatabaseStatus(str, Enum): online = "Online" @@ -651,6 +657,27 @@ class QueryMetricUnitType(str, Enum): count = "count" +class ElasticPoolState(str, Enum): + + creating = "Creating" + ready = "Ready" + disabled = "Disabled" + + +class ElasticPoolLicenseType(str, Enum): + + license_included = "LicenseIncluded" + base_price = "BasePrice" + + +class CreatedByType(str, Enum): + + user = "User" + application = "Application" + managed_identity = "ManagedIdentity" + key = "Key" + + class CreateMode(str, Enum): default = "Default" @@ -715,38 +742,32 @@ class SecondaryType(str, Enum): named = "Named" -class StorageKeyType(str, Enum): - - shared_access_key = "SharedAccessKey" - storage_access_key = "StorageAccessKey" - - -class ElasticPoolState(str, Enum): +class CurrentBackupStorageRedundancy(str, Enum): - creating = "Creating" - ready = "Ready" - disabled = "Disabled" + geo = "Geo" + local = "Local" + zone = "Zone" -class ElasticPoolLicenseType(str, Enum): +class RequestedBackupStorageRedundancy(str, Enum): - license_included = "LicenseIncluded" - base_price = "BasePrice" + geo = "Geo" + local = "Local" + zone = "Zone" -class CreatedByType(str, Enum): +class TargetBackupStorageRedundancy(str, Enum): - user = "User" - application = "Application" - managed_identity = "ManagedIdentity" - key = "Key" + geo = "Geo" + local = "Local" + zone = "Zone" -class LongTermRetentionDatabaseState(str, Enum): +class BackupStorageRedundancy(str, Enum): - all = "All" - live = "Live" - deleted = "Deleted" + geo = "Geo" + local = "Local" + zone = "Zone" class VulnerabilityAssessmentPolicyBaselineName(str, Enum): @@ -770,42 +791,7 @@ class CapabilityGroup(str, Enum): supported_managed_instance_editions = "supportedManagedInstanceEditions" -class DatabaseState1(str, Enum): - - all = "All" - live = "Live" - deleted = "Deleted" - - -class DatabaseState2(str, Enum): - - all = "All" - live = "Live" - deleted = "Deleted" - - -class DatabaseState3(str, Enum): - - all = "All" - live = "Live" - deleted = "Deleted" - - -class DatabaseState4(str, Enum): - - all = "All" - live = "Live" - deleted = "Deleted" - - -class DatabaseState5(str, Enum): - - all = "All" - live = "Live" - deleted = "Deleted" - - -class DatabaseState6(str, Enum): +class DatabaseState(str, Enum): all = "All" live = "Live" diff --git a/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/__init__.py b/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/__init__.py index 1380f65a6008..14870b5e3e26 100644 --- a/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/__init__.py +++ b/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/__init__.py @@ -54,8 +54,6 @@ from ._job_target_executions_operations import JobTargetExecutionsOperations from ._job_target_groups_operations import JobTargetGroupsOperations from 
._job_versions_operations import JobVersionsOperations -from ._long_term_retention_backups_operations import LongTermRetentionBackupsOperations -from ._backup_long_term_retention_policies_operations import BackupLongTermRetentionPoliciesOperations from ._managed_backup_short_term_retention_policies_operations import ManagedBackupShortTermRetentionPoliciesOperations from ._managed_restorable_dropped_database_backup_short_term_retention_policies_operations import ManagedRestorableDroppedDatabaseBackupShortTermRetentionPoliciesOperations from ._server_automatic_tuning_operations import ServerAutomaticTuningOperations @@ -85,7 +83,6 @@ from ._managed_database_sensitivity_labels_operations import ManagedDatabaseSensitivityLabelsOperations from ._instance_pools_operations import InstancePoolsOperations from ._usages_operations import UsagesOperations -from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations from ._private_link_resources_operations import PrivateLinkResourcesOperations from ._servers_operations import ServersOperations from ._capabilities_operations import CapabilitiesOperations @@ -97,6 +94,7 @@ from ._server_azure_ad_administrators_operations import ServerAzureADAdministratorsOperations from ._sync_groups_operations import SyncGroupsOperations from ._sync_members_operations import SyncMembersOperations +from ._import_export_operations import ImportExportOperations from ._managed_databases_operations import ManagedDatabasesOperations from ._managed_database_restore_details_operations import ManagedDatabaseRestoreDetailsOperations from ._server_azure_ad_only_authentications_operations import ServerAzureADOnlyAuthenticationsOperations @@ -104,6 +102,9 @@ from ._managed_instance_azure_ad_only_authentications_operations import ManagedInstanceAzureADOnlyAuthenticationsOperations from ._server_trust_groups_operations import ServerTrustGroupsOperations from ._server_dev_ops_audit_settings_operations import ServerDevOpsAuditSettingsOperations +from ._long_term_retention_backups_operations import LongTermRetentionBackupsOperations +from ._long_term_retention_policies_operations import LongTermRetentionPoliciesOperations +from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations __all__ = [ 'RecoverableDatabasesOperations', @@ -151,8 +152,6 @@ 'JobTargetExecutionsOperations', 'JobTargetGroupsOperations', 'JobVersionsOperations', - 'LongTermRetentionBackupsOperations', - 'BackupLongTermRetentionPoliciesOperations', 'ManagedBackupShortTermRetentionPoliciesOperations', 'ManagedRestorableDroppedDatabaseBackupShortTermRetentionPoliciesOperations', 'ServerAutomaticTuningOperations', @@ -182,7 +181,6 @@ 'ManagedDatabaseSensitivityLabelsOperations', 'InstancePoolsOperations', 'UsagesOperations', - 'PrivateEndpointConnectionsOperations', 'PrivateLinkResourcesOperations', 'ServersOperations', 'CapabilitiesOperations', @@ -194,6 +192,7 @@ 'ServerAzureADAdministratorsOperations', 'SyncGroupsOperations', 'SyncMembersOperations', + 'ImportExportOperations', 'ManagedDatabasesOperations', 'ManagedDatabaseRestoreDetailsOperations', 'ServerAzureADOnlyAuthenticationsOperations', @@ -201,4 +200,7 @@ 'ManagedInstanceAzureADOnlyAuthenticationsOperations', 'ServerTrustGroupsOperations', 'ServerDevOpsAuditSettingsOperations', + 'LongTermRetentionBackupsOperations', + 'LongTermRetentionPoliciesOperations', + 'PrivateEndpointConnectionsOperations', ] diff --git a/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/_databases_operations.py 
b/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/_databases_operations.py index a2cbd88c163d..4da046c788e8 100644 --- a/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/_databases_operations.py +++ b/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/_databases_operations.py @@ -201,6 +201,115 @@ def internal_paging(next_link=None): return deserialized list_metric_definitions.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/metricDefinitions'} + + def _export_initial( + self, resource_group_name, server_name, database_name, parameters, custom_headers=None, raw=False, **operation_config): + api_version = "2020-02-02-preview" + + # Construct URL + url = self.export.metadata['url'] + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'serverName': self._serialize.url("server_name", server_name, 'str'), + 'databaseName': self._serialize.url("database_name", database_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(parameters, 'ExportDatabaseDefinition') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('ImportExportOperationResult', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def export( + self, resource_group_name, server_name, database_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config): + """Exports a database. + + :param resource_group_name: The name of the resource group that + contains the resource. You can obtain this value from the Azure + Resource Manager API or the portal. + :type resource_group_name: str + :param server_name: The name of the server. + :type server_name: str + :param database_name: The name of the database. + :type database_name: str + :param parameters: The database export request parameters. 
+ :type parameters: ~azure.mgmt.sql.models.ExportDatabaseDefinition + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns + ImportExportOperationResult or + ClientRawResponse if raw==True + :rtype: + ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.sql.models.ImportExportOperationResult] + or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.sql.models.ImportExportOperationResult]] + :raises: :class:`CloudError` + """ + raw_result = self._export_initial( + resource_group_name=resource_group_name, + server_name=server_name, + database_name=database_name, + parameters=parameters, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + deserialized = self._deserialize('ImportExportOperationResult', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + export.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/export'} + def list_by_server( self, resource_group_name, server_name, custom_headers=None, raw=False, **operation_config): """Gets a list of databases. 
@@ -221,7 +330,7 @@ def list_by_server( ~azure.mgmt.sql.models.DatabasePaged[~azure.mgmt.sql.models.Database] :raises: :class:`CloudError` """ - api_version = "2020-08-01-preview" + api_version = "2020-11-01-preview" def prepare_request(next_link=None): if not next_link: @@ -299,7 +408,7 @@ def get( ~msrest.pipeline.ClientRawResponse :raises: :class:`CloudError` """ - api_version = "2020-08-01-preview" + api_version = "2020-11-01-preview" # Construct URL url = self.get.metadata['url'] @@ -348,7 +457,7 @@ def get( def _create_or_update_initial( self, resource_group_name, server_name, database_name, parameters, custom_headers=None, raw=False, **operation_config): - api_version = "2020-08-01-preview" + api_version = "2020-11-01-preview" # Construct URL url = self.create_or_update.metadata['url'] @@ -458,7 +567,7 @@ def get_long_running_output(response): def _delete_initial( self, resource_group_name, server_name, database_name, custom_headers=None, raw=False, **operation_config): - api_version = "2020-08-01-preview" + api_version = "2020-11-01-preview" # Construct URL url = self.delete.metadata['url'] @@ -545,7 +654,7 @@ def get_long_running_output(response): def _update_initial( self, resource_group_name, server_name, database_name, parameters, custom_headers=None, raw=False, **operation_config): - api_version = "2020-08-01-preview" + api_version = "2020-11-01-preview" # Construct URL url = self.update.metadata['url'] @@ -672,7 +781,7 @@ def list_by_elastic_pool( ~azure.mgmt.sql.models.DatabasePaged[~azure.mgmt.sql.models.Database] :raises: :class:`CloudError` """ - api_version = "2020-08-01-preview" + api_version = "2020-11-01-preview" def prepare_request(next_link=None): if not next_link: @@ -732,7 +841,7 @@ def internal_paging(next_link=None): def _pause_initial( self, resource_group_name, server_name, database_name, custom_headers=None, raw=False, **operation_config): - api_version = "2020-08-01-preview" + api_version = "2020-11-01-preview" # Construct URL url = self.pause.metadata['url'] @@ -833,7 +942,7 @@ def get_long_running_output(response): def _resume_initial( self, resource_group_name, server_name, database_name, custom_headers=None, raw=False, **operation_config): - api_version = "2020-08-01-preview" + api_version = "2020-11-01-preview" # Construct URL url = self.resume.metadata['url'] @@ -934,7 +1043,7 @@ def get_long_running_output(response): def _failover_initial( self, resource_group_name, server_name, database_name, replica_type=None, custom_headers=None, raw=False, **operation_config): - api_version = "2020-08-01-preview" + api_version = "2020-11-01-preview" # Construct URL url = self.failover.metadata['url'] @@ -1044,7 +1153,7 @@ def list_inaccessible_by_server( ~azure.mgmt.sql.models.DatabasePaged[~azure.mgmt.sql.models.Database] :raises: :class:`CloudError` """ - api_version = "2020-08-01-preview" + api_version = "2020-11-01-preview" def prepare_request(next_link=None): if not next_link: @@ -1103,7 +1212,7 @@ def internal_paging(next_link=None): def _upgrade_data_warehouse_initial( self, resource_group_name, server_name, database_name, custom_headers=None, raw=False, **operation_config): - api_version = "2020-08-01-preview" + api_version = "2020-11-01-preview" # Construct URL url = self.upgrade_data_warehouse.metadata['url'] @@ -1212,7 +1321,7 @@ def rename( """ parameters = models.ResourceMoveDefinition(id=id) - api_version = "2020-08-01-preview" + api_version = "2020-11-01-preview" # Construct URL url = self.rename.metadata['url'] @@ -1254,327 +1363,3 @@ def rename( 
client_raw_response = ClientRawResponse(None, response) return client_raw_response rename.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/move'} - - - def _import_method_initial( - self, resource_group_name, server_name, database_name, parameters, custom_headers=None, raw=False, **operation_config): - api_version = "2020-08-01-preview" - - # Construct URL - url = self.import_method.metadata['url'] - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'serverName': self._serialize.url("server_name", server_name, 'str'), - 'databaseName': self._serialize.url("database_name", database_name, 'str'), - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} - header_parameters['Accept'] = 'application/json' - header_parameters['Content-Type'] = 'application/json; charset=utf-8' - if self.config.generate_client_request_id: - header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) - if custom_headers: - header_parameters.update(custom_headers) - if self.config.accept_language is not None: - header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') - - # Construct body - body_content = self._serialize.body(parameters, 'ImportExistingDatabaseDefinition') - - # Construct and send request - request = self._client.post(url, query_parameters, header_parameters, body_content) - response = self._client.send(request, stream=False, **operation_config) - - if response.status_code not in [200, 202]: - exp = CloudError(response) - exp.request_id = response.headers.get('x-ms-request-id') - raise exp - - deserialized = None - - if response.status_code == 200: - deserialized = self._deserialize('ImportExportOperationResult', response) - - if raw: - client_raw_response = ClientRawResponse(deserialized, response) - return client_raw_response - - return deserialized - - def import_method( - self, resource_group_name, server_name, database_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config): - """Imports a bacpac into a new database. - - :param resource_group_name: The name of the resource group that - contains the resource. You can obtain this value from the Azure - Resource Manager API or the portal. - :type resource_group_name: str - :param server_name: The name of the server. - :type server_name: str - :param database_name: The name of the database. - :type database_name: str - :param parameters: The database import request parameters. 
- :type parameters: - ~azure.mgmt.sql.models.ImportExistingDatabaseDefinition - :param dict custom_headers: headers that will be added to the request - :param bool raw: The poller return type is ClientRawResponse, the - direct response alongside the deserialized response - :param polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :return: An instance of LROPoller that returns - ImportExportOperationResult or - ClientRawResponse if raw==True - :rtype: - ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.sql.models.ImportExportOperationResult] - or - ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.sql.models.ImportExportOperationResult]] - :raises: :class:`CloudError` - """ - raw_result = self._import_method_initial( - resource_group_name=resource_group_name, - server_name=server_name, - database_name=database_name, - parameters=parameters, - custom_headers=custom_headers, - raw=True, - **operation_config - ) - - def get_long_running_output(response): - deserialized = self._deserialize('ImportExportOperationResult', response) - - if raw: - client_raw_response = ClientRawResponse(deserialized, response) - return client_raw_response - - return deserialized - - lro_delay = operation_config.get( - 'long_running_operation_timeout', - self.config.long_running_operation_timeout) - if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - import_method.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/import'} - - - def _import_database_initial( - self, resource_group_name, server_name, parameters, custom_headers=None, raw=False, **operation_config): - api_version = "2020-08-01-preview" - - # Construct URL - url = self.import_database.metadata['url'] - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'serverName': self._serialize.url("server_name", server_name, 'str'), - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} - header_parameters['Accept'] = 'application/json' - header_parameters['Content-Type'] = 'application/json; charset=utf-8' - if self.config.generate_client_request_id: - header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) - if custom_headers: - header_parameters.update(custom_headers) - if self.config.accept_language is not None: - header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') - - # Construct body - body_content = self._serialize.body(parameters, 'ImportNewDatabaseDefinition') - - # Construct and send request - request = self._client.post(url, query_parameters, header_parameters, body_content) - response = self._client.send(request, stream=False, **operation_config) - - if response.status_code not in [200, 202]: - exp = CloudError(response) - exp.request_id = response.headers.get('x-ms-request-id') - 
raise exp - - deserialized = None - - if response.status_code == 200: - deserialized = self._deserialize('ImportExportOperationResult', response) - - if raw: - client_raw_response = ClientRawResponse(deserialized, response) - return client_raw_response - - return deserialized - - def import_database( - self, resource_group_name, server_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config): - """Imports a bacpac into a new database. - - :param resource_group_name: The name of the resource group that - contains the resource. You can obtain this value from the Azure - Resource Manager API or the portal. - :type resource_group_name: str - :param server_name: The name of the server. - :type server_name: str - :param parameters: The database import request parameters. - :type parameters: ~azure.mgmt.sql.models.ImportNewDatabaseDefinition - :param dict custom_headers: headers that will be added to the request - :param bool raw: The poller return type is ClientRawResponse, the - direct response alongside the deserialized response - :param polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :return: An instance of LROPoller that returns - ImportExportOperationResult or - ClientRawResponse if raw==True - :rtype: - ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.sql.models.ImportExportOperationResult] - or - ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.sql.models.ImportExportOperationResult]] - :raises: :class:`CloudError` - """ - raw_result = self._import_database_initial( - resource_group_name=resource_group_name, - server_name=server_name, - parameters=parameters, - custom_headers=custom_headers, - raw=True, - **operation_config - ) - - def get_long_running_output(response): - deserialized = self._deserialize('ImportExportOperationResult', response) - - if raw: - client_raw_response = ClientRawResponse(deserialized, response) - return client_raw_response - - return deserialized - - lro_delay = operation_config.get( - 'long_running_operation_timeout', - self.config.long_running_operation_timeout) - if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - import_database.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/import'} - - - def _export_initial( - self, resource_group_name, server_name, database_name, parameters, custom_headers=None, raw=False, **operation_config): - api_version = "2020-08-01-preview" - - # Construct URL - url = self.export.metadata['url'] - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'serverName': self._serialize.url("server_name", server_name, 'str'), - 'databaseName': self._serialize.url("database_name", database_name, 'str'), - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} - header_parameters['Accept'] = 'application/json' - header_parameters['Content-Type'] 
= 'application/json; charset=utf-8' - if self.config.generate_client_request_id: - header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) - if custom_headers: - header_parameters.update(custom_headers) - if self.config.accept_language is not None: - header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') - - # Construct body - body_content = self._serialize.body(parameters, 'ExportDatabaseDefinition') - - # Construct and send request - request = self._client.post(url, query_parameters, header_parameters, body_content) - response = self._client.send(request, stream=False, **operation_config) - - if response.status_code not in [200, 202]: - exp = CloudError(response) - exp.request_id = response.headers.get('x-ms-request-id') - raise exp - - deserialized = None - - if response.status_code == 200: - deserialized = self._deserialize('ImportExportOperationResult', response) - - if raw: - client_raw_response = ClientRawResponse(deserialized, response) - return client_raw_response - - return deserialized - - def export( - self, resource_group_name, server_name, database_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config): - """Exports a database. - - :param resource_group_name: The name of the resource group that - contains the resource. You can obtain this value from the Azure - Resource Manager API or the portal. - :type resource_group_name: str - :param server_name: The name of the server. - :type server_name: str - :param database_name: The name of the database. - :type database_name: str - :param parameters: The database export request parameters. - :type parameters: ~azure.mgmt.sql.models.ExportDatabaseDefinition - :param dict custom_headers: headers that will be added to the request - :param bool raw: The poller return type is ClientRawResponse, the - direct response alongside the deserialized response - :param polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :return: An instance of LROPoller that returns - ImportExportOperationResult or - ClientRawResponse if raw==True - :rtype: - ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.sql.models.ImportExportOperationResult] - or - ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.sql.models.ImportExportOperationResult]] - :raises: :class:`CloudError` - """ - raw_result = self._export_initial( - resource_group_name=resource_group_name, - server_name=server_name, - database_name=database_name, - parameters=parameters, - custom_headers=custom_headers, - raw=True, - **operation_config - ) - - def get_long_running_output(response): - deserialized = self._deserialize('ImportExportOperationResult', response) - - if raw: - client_raw_response = ClientRawResponse(deserialized, response) - return client_raw_response - - return deserialized - - lro_delay = operation_config.get( - 'long_running_operation_timeout', - self.config.long_running_operation_timeout) - if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - export.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/export'} diff --git 
a/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/_import_export_operations.py b/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/_import_export_operations.py new file mode 100644 index 000000000000..ce62b16c135a --- /dev/null +++ b/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/_import_export_operations.py @@ -0,0 +1,150 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError +from msrest.polling import LROPoller, NoPolling +from msrestazure.polling.arm_polling import ARMPolling + +from .. import models + + +class ImportExportOperations(object): + """ImportExportOperations operations. + + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version to use for the request. Constant value: "2020-02-02-preview". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2020-02-02-preview" + + self.config = config + + + def _import_method_initial( + self, resource_group_name, server_name, database_name, parameters, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.import_method.metadata['url'] + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'serverName': self._serialize.url("server_name", server_name, 'str'), + 'databaseName': self._serialize.url("database_name", database_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(parameters, 'ImportExistingDatabaseDefinition') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202]: + exp = CloudError(response) + 
exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('ImportExportOperationResult', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def import_method( + self, resource_group_name, server_name, database_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config): + """Imports a bacpac into a new database. + + :param resource_group_name: The name of the resource group that + contains the resource. You can obtain this value from the Azure + Resource Manager API or the portal. + :type resource_group_name: str + :param server_name: The name of the server. + :type server_name: str + :param database_name: The name of the database. + :type database_name: str + :param parameters: The database import request parameters. + :type parameters: + ~azure.mgmt.sql.models.ImportExistingDatabaseDefinition + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns + ImportExportOperationResult or + ClientRawResponse if raw==True + :rtype: + ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.sql.models.ImportExportOperationResult] + or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.sql.models.ImportExportOperationResult]] + :raises: :class:`CloudError` + """ + raw_result = self._import_method_initial( + resource_group_name=resource_group_name, + server_name=server_name, + database_name=database_name, + parameters=parameters, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + deserialized = self._deserialize('ImportExportOperationResult', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + import_method.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/import'} diff --git a/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/_long_term_retention_backups_operations.py b/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/_long_term_retention_backups_operations.py index 55a0bbdd777a..9e59fbcbbebc 100644 --- a/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/_long_term_retention_backups_operations.py +++ b/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/_long_term_retention_backups_operations.py @@ -27,7 +27,7 @@ class LongTermRetentionBackupsOperations(object): :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. - :ivar api_version: The API version to use for the request. Constant value: "2017-03-01-preview". 
+ :ivar api_version: The API version to use for the request. Constant value: "2020-11-01-preview". """ models = models @@ -37,18 +37,239 @@ def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer - self.api_version = "2017-03-01-preview" + self.api_version = "2020-11-01-preview" self.config = config - def get_by_resource_group( - self, resource_group_name, location_name, long_term_retention_server_name, long_term_retention_database_name, backup_name, custom_headers=None, raw=False, **operation_config): + + def _copy_initial( + self, location_name, long_term_retention_server_name, long_term_retention_database_name, backup_name, parameters, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.copy.metadata['url'] + path_format_arguments = { + 'locationName': self._serialize.url("location_name", location_name, 'str'), + 'longTermRetentionServerName': self._serialize.url("long_term_retention_server_name", long_term_retention_server_name, 'str'), + 'longTermRetentionDatabaseName': self._serialize.url("long_term_retention_database_name", long_term_retention_database_name, 'str'), + 'backupName': self._serialize.url("backup_name", backup_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(parameters, 'CopyLongTermRetentionBackupParameters') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('LongTermRetentionBackupOperationResult', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def copy( + self, location_name, long_term_retention_server_name, long_term_retention_database_name, backup_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config): + """Copy an existing long term retention backup. + + :param location_name: The location of the database. + :type location_name: str + :param long_term_retention_server_name: The name of the server + :type long_term_retention_server_name: str + :param long_term_retention_database_name: The name of the database + :type long_term_retention_database_name: str + :param backup_name: The backup name. 
+ :type backup_name: str + :param parameters: The parameters needed for long term retention copy + request + :type parameters: + ~azure.mgmt.sql.models.CopyLongTermRetentionBackupParameters + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns + LongTermRetentionBackupOperationResult or + ClientRawResponse if raw==True + :rtype: + ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.sql.models.LongTermRetentionBackupOperationResult] + or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.sql.models.LongTermRetentionBackupOperationResult]] + :raises: :class:`CloudError` + """ + raw_result = self._copy_initial( + location_name=location_name, + long_term_retention_server_name=long_term_retention_server_name, + long_term_retention_database_name=long_term_retention_database_name, + backup_name=backup_name, + parameters=parameters, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + deserialized = self._deserialize('LongTermRetentionBackupOperationResult', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + copy.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Sql/locations/{locationName}/longTermRetentionServers/{longTermRetentionServerName}/longTermRetentionDatabases/{longTermRetentionDatabaseName}/longTermRetentionBackups/{backupName}/copy'} + + + def _update_initial( + self, location_name, long_term_retention_server_name, long_term_retention_database_name, backup_name, requested_backup_storage_redundancy=None, custom_headers=None, raw=False, **operation_config): + parameters = models.UpdateLongTermRetentionBackupParameters(requested_backup_storage_redundancy=requested_backup_storage_redundancy) + + # Construct URL + url = self.update.metadata['url'] + path_format_arguments = { + 'locationName': self._serialize.url("location_name", location_name, 'str'), + 'longTermRetentionServerName': self._serialize.url("long_term_retention_server_name", long_term_retention_server_name, 'str'), + 'longTermRetentionDatabaseName': self._serialize.url("long_term_retention_database_name", long_term_retention_database_name, 'str'), + 'backupName': self._serialize.url("backup_name", backup_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if 
self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(parameters, 'UpdateLongTermRetentionBackupParameters') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('LongTermRetentionBackupOperationResult', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def update( + self, location_name, long_term_retention_server_name, long_term_retention_database_name, backup_name, requested_backup_storage_redundancy=None, custom_headers=None, raw=False, polling=True, **operation_config): + """Updates an existing long term retention backup. + + :param location_name: The location of the database. + :type location_name: str + :param long_term_retention_server_name: The name of the server + :type long_term_retention_server_name: str + :param long_term_retention_database_name: The name of the database + :type long_term_retention_database_name: str + :param backup_name: The backup name. + :type backup_name: str + :param requested_backup_storage_redundancy: The storage redundancy + type of the copied backup. 
Possible values include: 'Geo', 'Local', + 'Zone' + :type requested_backup_storage_redundancy: str or + ~azure.mgmt.sql.models.RequestedBackupStorageRedundancy + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns + LongTermRetentionBackupOperationResult or + ClientRawResponse if raw==True + :rtype: + ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.sql.models.LongTermRetentionBackupOperationResult] + or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.sql.models.LongTermRetentionBackupOperationResult]] + :raises: :class:`CloudError` + """ + raw_result = self._update_initial( + location_name=location_name, + long_term_retention_server_name=long_term_retention_server_name, + long_term_retention_database_name=long_term_retention_database_name, + backup_name=backup_name, + requested_backup_storage_redundancy=requested_backup_storage_redundancy, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + deserialized = self._deserialize('LongTermRetentionBackupOperationResult', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + update.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Sql/locations/{locationName}/longTermRetentionServers/{longTermRetentionServerName}/longTermRetentionDatabases/{longTermRetentionDatabaseName}/longTermRetentionBackups/{backupName}/update'} + + def get( + self, location_name, long_term_retention_server_name, long_term_retention_database_name, backup_name, custom_headers=None, raw=False, **operation_config): """Gets a long term retention backup. - :param resource_group_name: The name of the resource group that - contains the resource. You can obtain this value from the Azure - Resource Manager API or the portal. - :type resource_group_name: str :param location_name: The location of the database. 
:type location_name: str :param long_term_retention_server_name: The name of the server @@ -68,9 +289,8 @@ def get_by_resource_group( :raises: :class:`CloudError` """ # Construct URL - url = self.get_by_resource_group.metadata['url'] + url = self.get.metadata['url'] path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'locationName': self._serialize.url("location_name", location_name, 'str'), 'longTermRetentionServerName': self._serialize.url("long_term_retention_server_name", long_term_retention_server_name, 'str'), 'longTermRetentionDatabaseName': self._serialize.url("long_term_retention_database_name", long_term_retention_database_name, 'str'), @@ -111,15 +331,14 @@ def get_by_resource_group( return client_raw_response return deserialized - get_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/locations/{locationName}/longTermRetentionServers/{longTermRetentionServerName}/longTermRetentionDatabases/{longTermRetentionDatabaseName}/longTermRetentionBackups/{backupName}'} + get.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Sql/locations/{locationName}/longTermRetentionServers/{longTermRetentionServerName}/longTermRetentionDatabases/{longTermRetentionDatabaseName}/longTermRetentionBackups/{backupName}'} - def _delete_by_resource_group_initial( - self, resource_group_name, location_name, long_term_retention_server_name, long_term_retention_database_name, backup_name, custom_headers=None, raw=False, **operation_config): + def _delete_initial( + self, location_name, long_term_retention_server_name, long_term_retention_database_name, backup_name, custom_headers=None, raw=False, **operation_config): # Construct URL - url = self.delete_by_resource_group.metadata['url'] + url = self.delete.metadata['url'] path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'locationName': self._serialize.url("location_name", location_name, 'str'), 'longTermRetentionServerName': self._serialize.url("long_term_retention_server_name", long_term_retention_server_name, 'str'), 'longTermRetentionDatabaseName': self._serialize.url("long_term_retention_database_name", long_term_retention_database_name, 'str'), @@ -154,14 +373,10 @@ def _delete_by_resource_group_initial( client_raw_response = ClientRawResponse(None, response) return client_raw_response - def delete_by_resource_group( - self, resource_group_name, location_name, long_term_retention_server_name, long_term_retention_database_name, backup_name, custom_headers=None, raw=False, polling=True, **operation_config): + def delete( + self, location_name, long_term_retention_server_name, long_term_retention_database_name, backup_name, custom_headers=None, raw=False, polling=True, **operation_config): """Deletes a long term retention backup. - :param resource_group_name: The name of the resource group that - contains the resource. You can obtain this value from the Azure - Resource Manager API or the portal. 
- :type resource_group_name: str :param location_name: The location of the database :type location_name: str :param long_term_retention_server_name: The name of the server @@ -181,8 +396,7 @@ def delete_by_resource_group( ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] :raises: :class:`CloudError` """ - raw_result = self._delete_by_resource_group_initial( - resource_group_name=resource_group_name, + raw_result = self._delete_initial( location_name=location_name, long_term_retention_server_name=long_term_retention_server_name, long_term_retention_database_name=long_term_retention_database_name, @@ -204,16 +418,12 @@ def get_long_running_output(response): elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - delete_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/locations/{locationName}/longTermRetentionServers/{longTermRetentionServerName}/longTermRetentionDatabases/{longTermRetentionDatabaseName}/longTermRetentionBackups/{backupName}'} + delete.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Sql/locations/{locationName}/longTermRetentionServers/{longTermRetentionServerName}/longTermRetentionDatabases/{longTermRetentionDatabaseName}/longTermRetentionBackups/{backupName}'} - def list_by_resource_group_database( - self, resource_group_name, location_name, long_term_retention_server_name, long_term_retention_database_name, only_latest_per_database=None, database_state=None, custom_headers=None, raw=False, **operation_config): + def list_by_database( + self, location_name, long_term_retention_server_name, long_term_retention_database_name, only_latest_per_database=None, database_state=None, custom_headers=None, raw=False, **operation_config): """Lists all long term retention backups for a database. - :param resource_group_name: The name of the resource group that - contains the resource. You can obtain this value from the Azure - Resource Manager API or the portal. - :type resource_group_name: str :param location_name: The location of the database :type location_name: str :param long_term_retention_server_name: The name of the server @@ -226,8 +436,7 @@ def list_by_resource_group_database( :param database_state: Whether to query against just live databases, just deleted databases, or all databases. 
Possible values include: 'All', 'Live', 'Deleted' - :type database_state: str or - ~azure.mgmt.sql.models.LongTermRetentionDatabaseState + :type database_state: str or ~azure.mgmt.sql.models.DatabaseState :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response @@ -241,9 +450,8 @@ def list_by_resource_group_database( def prepare_request(next_link=None): if not next_link: # Construct URL - url = self.list_by_resource_group_database.metadata['url'] + url = self.list_by_database.metadata['url'] path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'locationName': self._serialize.url("location_name", location_name, 'str'), 'longTermRetentionServerName': self._serialize.url("long_term_retention_server_name", long_term_retention_server_name, 'str'), 'longTermRetentionDatabaseName': self._serialize.url("long_term_retention_database_name", long_term_retention_database_name, 'str'), @@ -296,16 +504,12 @@ def internal_paging(next_link=None): deserialized = models.LongTermRetentionBackupPaged(internal_paging, self._deserialize.dependencies, header_dict) return deserialized - list_by_resource_group_database.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/locations/{locationName}/longTermRetentionServers/{longTermRetentionServerName}/longTermRetentionDatabases/{longTermRetentionDatabaseName}/longTermRetentionBackups'} + list_by_database.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Sql/locations/{locationName}/longTermRetentionServers/{longTermRetentionServerName}/longTermRetentionDatabases/{longTermRetentionDatabaseName}/longTermRetentionBackups'} - def list_by_resource_group_location( - self, resource_group_name, location_name, only_latest_per_database=None, database_state=None, custom_headers=None, raw=False, **operation_config): + def list_by_location( + self, location_name, only_latest_per_database=None, database_state=None, custom_headers=None, raw=False, **operation_config): """Lists the long term retention backups for a given location. - :param resource_group_name: The name of the resource group that - contains the resource. You can obtain this value from the Azure - Resource Manager API or the portal. - :type resource_group_name: str :param location_name: The location of the database :type location_name: str :param only_latest_per_database: Whether or not to only get the latest @@ -314,8 +518,7 @@ def list_by_resource_group_location( :param database_state: Whether to query against just live databases, just deleted databases, or all databases. 
Possible values include: 'All', 'Live', 'Deleted' - :type database_state: str or - ~azure.mgmt.sql.models.LongTermRetentionDatabaseState + :type database_state: str or ~azure.mgmt.sql.models.DatabaseState :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response @@ -329,9 +532,8 @@ def list_by_resource_group_location( def prepare_request(next_link=None): if not next_link: # Construct URL - url = self.list_by_resource_group_location.metadata['url'] + url = self.list_by_location.metadata['url'] path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'locationName': self._serialize.url("location_name", location_name, 'str'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } @@ -382,16 +584,12 @@ def internal_paging(next_link=None): deserialized = models.LongTermRetentionBackupPaged(internal_paging, self._deserialize.dependencies, header_dict) return deserialized - list_by_resource_group_location.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/locations/{locationName}/longTermRetentionBackups'} + list_by_location.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Sql/locations/{locationName}/longTermRetentionBackups'} - def list_by_resource_group_server( - self, resource_group_name, location_name, long_term_retention_server_name, only_latest_per_database=None, database_state=None, custom_headers=None, raw=False, **operation_config): + def list_by_server( + self, location_name, long_term_retention_server_name, only_latest_per_database=None, database_state=None, custom_headers=None, raw=False, **operation_config): """Lists the long term retention backups for a given server. - :param resource_group_name: The name of the resource group that - contains the resource. You can obtain this value from the Azure - Resource Manager API or the portal. - :type resource_group_name: str :param location_name: The location of the database :type location_name: str :param long_term_retention_server_name: The name of the server @@ -402,8 +600,7 @@ def list_by_resource_group_server( :param database_state: Whether to query against just live databases, just deleted databases, or all databases. 
Possible values include: 'All', 'Live', 'Deleted' - :type database_state: str or - ~azure.mgmt.sql.models.LongTermRetentionDatabaseState + :type database_state: str or ~azure.mgmt.sql.models.DatabaseState :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response @@ -417,9 +614,8 @@ def list_by_resource_group_server( def prepare_request(next_link=None): if not next_link: # Construct URL - url = self.list_by_resource_group_server.metadata['url'] + url = self.list_by_server.metadata['url'] path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'locationName': self._serialize.url("location_name", location_name, 'str'), 'longTermRetentionServerName': self._serialize.url("long_term_retention_server_name", long_term_retention_server_name, 'str'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') @@ -471,12 +667,253 @@ def internal_paging(next_link=None): deserialized = models.LongTermRetentionBackupPaged(internal_paging, self._deserialize.dependencies, header_dict) return deserialized - list_by_resource_group_server.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/locations/{locationName}/longTermRetentionServers/{longTermRetentionServerName}/longTermRetentionBackups'} + list_by_server.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Sql/locations/{locationName}/longTermRetentionServers/{longTermRetentionServerName}/longTermRetentionBackups'} - def get( - self, location_name, long_term_retention_server_name, long_term_retention_database_name, backup_name, custom_headers=None, raw=False, **operation_config): + + def _copy_by_resource_group_initial( + self, resource_group_name, location_name, long_term_retention_server_name, long_term_retention_database_name, backup_name, parameters, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.copy_by_resource_group.metadata['url'] + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'locationName': self._serialize.url("location_name", location_name, 'str'), + 'longTermRetentionServerName': self._serialize.url("long_term_retention_server_name", long_term_retention_server_name, 'str'), + 'longTermRetentionDatabaseName': self._serialize.url("long_term_retention_database_name", long_term_retention_database_name, 'str'), + 'backupName': self._serialize.url("backup_name", backup_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = 
self._serialize.body(parameters, 'CopyLongTermRetentionBackupParameters') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('LongTermRetentionBackupOperationResult', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def copy_by_resource_group( + self, resource_group_name, location_name, long_term_retention_server_name, long_term_retention_database_name, backup_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config): + """Copy an existing long term retention backup to a different server. + + :param resource_group_name: The name of the resource group that + contains the resource. You can obtain this value from the Azure + Resource Manager API or the portal. + :type resource_group_name: str + :param location_name: The location of the database. + :type location_name: str + :param long_term_retention_server_name: The name of the server + :type long_term_retention_server_name: str + :param long_term_retention_database_name: The name of the database + :type long_term_retention_database_name: str + :param backup_name: The backup name. + :type backup_name: str + :param parameters: The parameters needed for long term retention copy + request + :type parameters: + ~azure.mgmt.sql.models.CopyLongTermRetentionBackupParameters + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns + LongTermRetentionBackupOperationResult or + ClientRawResponse if raw==True + :rtype: + ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.sql.models.LongTermRetentionBackupOperationResult] + or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.sql.models.LongTermRetentionBackupOperationResult]] + :raises: :class:`CloudError` + """ + raw_result = self._copy_by_resource_group_initial( + resource_group_name=resource_group_name, + location_name=location_name, + long_term_retention_server_name=long_term_retention_server_name, + long_term_retention_database_name=long_term_retention_database_name, + backup_name=backup_name, + parameters=parameters, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + deserialized = self._deserialize('LongTermRetentionBackupOperationResult', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + copy_by_resource_group.metadata = 
{'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/locations/{locationName}/longTermRetentionServers/{longTermRetentionServerName}/longTermRetentionDatabases/{longTermRetentionDatabaseName}/longTermRetentionBackups/{backupName}/copy'} + + + def _update_by_resource_group_initial( + self, resource_group_name, location_name, long_term_retention_server_name, long_term_retention_database_name, backup_name, requested_backup_storage_redundancy=None, custom_headers=None, raw=False, **operation_config): + parameters = models.UpdateLongTermRetentionBackupParameters(requested_backup_storage_redundancy=requested_backup_storage_redundancy) + + # Construct URL + url = self.update_by_resource_group.metadata['url'] + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'locationName': self._serialize.url("location_name", location_name, 'str'), + 'longTermRetentionServerName': self._serialize.url("long_term_retention_server_name", long_term_retention_server_name, 'str'), + 'longTermRetentionDatabaseName': self._serialize.url("long_term_retention_database_name", long_term_retention_database_name, 'str'), + 'backupName': self._serialize.url("backup_name", backup_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(parameters, 'UpdateLongTermRetentionBackupParameters') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('LongTermRetentionBackupOperationResult', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def update_by_resource_group( + self, resource_group_name, location_name, long_term_retention_server_name, long_term_retention_database_name, backup_name, requested_backup_storage_redundancy=None, custom_headers=None, raw=False, polling=True, **operation_config): + """Updates an existing long term retention backup. + + :param resource_group_name: The name of the resource group that + contains the resource. You can obtain this value from the Azure + Resource Manager API or the portal. + :type resource_group_name: str + :param location_name: The location of the database. 
+ :type location_name: str + :param long_term_retention_server_name: The name of the server + :type long_term_retention_server_name: str + :param long_term_retention_database_name: The name of the database + :type long_term_retention_database_name: str + :param backup_name: The backup name. + :type backup_name: str + :param requested_backup_storage_redundancy: The storage redundancy + type of the copied backup. Possible values include: 'Geo', 'Local', + 'Zone' + :type requested_backup_storage_redundancy: str or + ~azure.mgmt.sql.models.RequestedBackupStorageRedundancy + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns + LongTermRetentionBackupOperationResult or + ClientRawResponse if raw==True + :rtype: + ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.sql.models.LongTermRetentionBackupOperationResult] + or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.sql.models.LongTermRetentionBackupOperationResult]] + :raises: :class:`CloudError` + """ + raw_result = self._update_by_resource_group_initial( + resource_group_name=resource_group_name, + location_name=location_name, + long_term_retention_server_name=long_term_retention_server_name, + long_term_retention_database_name=long_term_retention_database_name, + backup_name=backup_name, + requested_backup_storage_redundancy=requested_backup_storage_redundancy, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + deserialized = self._deserialize('LongTermRetentionBackupOperationResult', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + update_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/locations/{locationName}/longTermRetentionServers/{longTermRetentionServerName}/longTermRetentionDatabases/{longTermRetentionDatabaseName}/longTermRetentionBackups/{backupName}/update'} + + def get_by_resource_group( + self, resource_group_name, location_name, long_term_retention_server_name, long_term_retention_database_name, backup_name, custom_headers=None, raw=False, **operation_config): """Gets a long term retention backup. + :param resource_group_name: The name of the resource group that + contains the resource. You can obtain this value from the Azure + Resource Manager API or the portal. + :type resource_group_name: str :param location_name: The location of the database. 
:type location_name: str :param long_term_retention_server_name: The name of the server @@ -496,8 +933,9 @@ def get( :raises: :class:`CloudError` """ # Construct URL - url = self.get.metadata['url'] + url = self.get_by_resource_group.metadata['url'] path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'locationName': self._serialize.url("location_name", location_name, 'str'), 'longTermRetentionServerName': self._serialize.url("long_term_retention_server_name", long_term_retention_server_name, 'str'), 'longTermRetentionDatabaseName': self._serialize.url("long_term_retention_database_name", long_term_retention_database_name, 'str'), @@ -538,14 +976,15 @@ def get( return client_raw_response return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Sql/locations/{locationName}/longTermRetentionServers/{longTermRetentionServerName}/longTermRetentionDatabases/{longTermRetentionDatabaseName}/longTermRetentionBackups/{backupName}'} + get_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/locations/{locationName}/longTermRetentionServers/{longTermRetentionServerName}/longTermRetentionDatabases/{longTermRetentionDatabaseName}/longTermRetentionBackups/{backupName}'} - def _delete_initial( - self, location_name, long_term_retention_server_name, long_term_retention_database_name, backup_name, custom_headers=None, raw=False, **operation_config): + def _delete_by_resource_group_initial( + self, resource_group_name, location_name, long_term_retention_server_name, long_term_retention_database_name, backup_name, custom_headers=None, raw=False, **operation_config): # Construct URL - url = self.delete.metadata['url'] + url = self.delete_by_resource_group.metadata['url'] path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'locationName': self._serialize.url("location_name", location_name, 'str'), 'longTermRetentionServerName': self._serialize.url("long_term_retention_server_name", long_term_retention_server_name, 'str'), 'longTermRetentionDatabaseName': self._serialize.url("long_term_retention_database_name", long_term_retention_database_name, 'str'), @@ -580,10 +1019,14 @@ def _delete_initial( client_raw_response = ClientRawResponse(None, response) return client_raw_response - def delete( - self, location_name, long_term_retention_server_name, long_term_retention_database_name, backup_name, custom_headers=None, raw=False, polling=True, **operation_config): + def delete_by_resource_group( + self, resource_group_name, location_name, long_term_retention_server_name, long_term_retention_database_name, backup_name, custom_headers=None, raw=False, polling=True, **operation_config): """Deletes a long term retention backup. + :param resource_group_name: The name of the resource group that + contains the resource. You can obtain this value from the Azure + Resource Manager API or the portal. 
+ :type resource_group_name: str :param location_name: The location of the database :type location_name: str :param long_term_retention_server_name: The name of the server @@ -603,7 +1046,8 @@ def delete( ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] :raises: :class:`CloudError` """ - raw_result = self._delete_initial( + raw_result = self._delete_by_resource_group_initial( + resource_group_name=resource_group_name, location_name=location_name, long_term_retention_server_name=long_term_retention_server_name, long_term_retention_database_name=long_term_retention_database_name, @@ -625,12 +1069,16 @@ def get_long_running_output(response): elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - delete.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Sql/locations/{locationName}/longTermRetentionServers/{longTermRetentionServerName}/longTermRetentionDatabases/{longTermRetentionDatabaseName}/longTermRetentionBackups/{backupName}'} + delete_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/locations/{locationName}/longTermRetentionServers/{longTermRetentionServerName}/longTermRetentionDatabases/{longTermRetentionDatabaseName}/longTermRetentionBackups/{backupName}'} - def list_by_database( - self, location_name, long_term_retention_server_name, long_term_retention_database_name, only_latest_per_database=None, database_state=None, custom_headers=None, raw=False, **operation_config): + def list_by_resource_group_database( + self, resource_group_name, location_name, long_term_retention_server_name, long_term_retention_database_name, only_latest_per_database=None, database_state=None, custom_headers=None, raw=False, **operation_config): """Lists all long term retention backups for a database. + :param resource_group_name: The name of the resource group that + contains the resource. You can obtain this value from the Azure + Resource Manager API or the portal. + :type resource_group_name: str :param location_name: The location of the database :type location_name: str :param long_term_retention_server_name: The name of the server @@ -643,8 +1091,7 @@ def list_by_database( :param database_state: Whether to query against just live databases, just deleted databases, or all databases. 
Possible values include: 'All', 'Live', 'Deleted' - :type database_state: str or - ~azure.mgmt.sql.models.LongTermRetentionDatabaseState + :type database_state: str or ~azure.mgmt.sql.models.DatabaseState :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response @@ -658,8 +1105,9 @@ def list_by_database( def prepare_request(next_link=None): if not next_link: # Construct URL - url = self.list_by_database.metadata['url'] + url = self.list_by_resource_group_database.metadata['url'] path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'locationName': self._serialize.url("location_name", location_name, 'str'), 'longTermRetentionServerName': self._serialize.url("long_term_retention_server_name", long_term_retention_server_name, 'str'), 'longTermRetentionDatabaseName': self._serialize.url("long_term_retention_database_name", long_term_retention_database_name, 'str'), @@ -712,12 +1160,16 @@ def internal_paging(next_link=None): deserialized = models.LongTermRetentionBackupPaged(internal_paging, self._deserialize.dependencies, header_dict) return deserialized - list_by_database.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Sql/locations/{locationName}/longTermRetentionServers/{longTermRetentionServerName}/longTermRetentionDatabases/{longTermRetentionDatabaseName}/longTermRetentionBackups'} + list_by_resource_group_database.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/locations/{locationName}/longTermRetentionServers/{longTermRetentionServerName}/longTermRetentionDatabases/{longTermRetentionDatabaseName}/longTermRetentionBackups'} - def list_by_location( - self, location_name, only_latest_per_database=None, database_state=None, custom_headers=None, raw=False, **operation_config): + def list_by_resource_group_location( + self, resource_group_name, location_name, only_latest_per_database=None, database_state=None, custom_headers=None, raw=False, **operation_config): """Lists the long term retention backups for a given location. + :param resource_group_name: The name of the resource group that + contains the resource. You can obtain this value from the Azure + Resource Manager API or the portal. + :type resource_group_name: str :param location_name: The location of the database :type location_name: str :param only_latest_per_database: Whether or not to only get the latest @@ -726,8 +1178,7 @@ def list_by_location( :param database_state: Whether to query against just live databases, just deleted databases, or all databases. 
Possible values include: 'All', 'Live', 'Deleted' - :type database_state: str or - ~azure.mgmt.sql.models.LongTermRetentionDatabaseState + :type database_state: str or ~azure.mgmt.sql.models.DatabaseState :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response @@ -741,8 +1192,9 @@ def list_by_location( def prepare_request(next_link=None): if not next_link: # Construct URL - url = self.list_by_location.metadata['url'] + url = self.list_by_resource_group_location.metadata['url'] path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'locationName': self._serialize.url("location_name", location_name, 'str'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } @@ -793,12 +1245,16 @@ def internal_paging(next_link=None): deserialized = models.LongTermRetentionBackupPaged(internal_paging, self._deserialize.dependencies, header_dict) return deserialized - list_by_location.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Sql/locations/{locationName}/longTermRetentionBackups'} + list_by_resource_group_location.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/locations/{locationName}/longTermRetentionBackups'} - def list_by_server( - self, location_name, long_term_retention_server_name, only_latest_per_database=None, database_state=None, custom_headers=None, raw=False, **operation_config): + def list_by_resource_group_server( + self, resource_group_name, location_name, long_term_retention_server_name, only_latest_per_database=None, database_state=None, custom_headers=None, raw=False, **operation_config): """Lists the long term retention backups for a given server. + :param resource_group_name: The name of the resource group that + contains the resource. You can obtain this value from the Azure + Resource Manager API or the portal. + :type resource_group_name: str :param location_name: The location of the database :type location_name: str :param long_term_retention_server_name: The name of the server @@ -809,8 +1265,7 @@ def list_by_server( :param database_state: Whether to query against just live databases, just deleted databases, or all databases. 
Possible values include: 'All', 'Live', 'Deleted' - :type database_state: str or - ~azure.mgmt.sql.models.LongTermRetentionDatabaseState + :type database_state: str or ~azure.mgmt.sql.models.DatabaseState :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response @@ -824,8 +1279,9 @@ def list_by_server( def prepare_request(next_link=None): if not next_link: # Construct URL - url = self.list_by_server.metadata['url'] + url = self.list_by_resource_group_server.metadata['url'] path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'locationName': self._serialize.url("location_name", location_name, 'str'), 'longTermRetentionServerName': self._serialize.url("long_term_retention_server_name", long_term_retention_server_name, 'str'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') @@ -877,4 +1333,4 @@ def internal_paging(next_link=None): deserialized = models.LongTermRetentionBackupPaged(internal_paging, self._deserialize.dependencies, header_dict) return deserialized - list_by_server.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Sql/locations/{locationName}/longTermRetentionServers/{longTermRetentionServerName}/longTermRetentionBackups'} + list_by_resource_group_server.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/locations/{locationName}/longTermRetentionServers/{longTermRetentionServerName}/longTermRetentionBackups'} diff --git a/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/_long_term_retention_managed_instance_backups_operations.py b/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/_long_term_retention_managed_instance_backups_operations.py index 6de0045d6600..417faaf788d4 100644 --- a/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/_long_term_retention_managed_instance_backups_operations.py +++ b/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/_long_term_retention_managed_instance_backups_operations.py @@ -212,7 +212,7 @@ def list_by_database( :param database_state: Whether to query against just live databases, just deleted databases, or all databases. Possible values include: 'All', 'Live', 'Deleted' - :type database_state: str or ~azure.mgmt.sql.models.DatabaseState1 + :type database_state: str or ~azure.mgmt.sql.models.DatabaseState :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response @@ -297,7 +297,7 @@ def list_by_instance( :param database_state: Whether to query against just live databases, just deleted databases, or all databases. Possible values include: 'All', 'Live', 'Deleted' - :type database_state: str or ~azure.mgmt.sql.models.DatabaseState2 + :type database_state: str or ~azure.mgmt.sql.models.DatabaseState :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response @@ -380,7 +380,7 @@ def list_by_location( :param database_state: Whether to query against just live databases, just deleted databases, or all databases. 
Possible values include: 'All', 'Live', 'Deleted' - :type database_state: str or ~azure.mgmt.sql.models.DatabaseState3 + :type database_state: str or ~azure.mgmt.sql.models.DatabaseState :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response @@ -635,7 +635,7 @@ def list_by_resource_group_database( :param database_state: Whether to query against just live databases, just deleted databases, or all databases. Possible values include: 'All', 'Live', 'Deleted' - :type database_state: str or ~azure.mgmt.sql.models.DatabaseState4 + :type database_state: str or ~azure.mgmt.sql.models.DatabaseState :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response @@ -725,7 +725,7 @@ def list_by_resource_group_instance( :param database_state: Whether to query against just live databases, just deleted databases, or all databases. Possible values include: 'All', 'Live', 'Deleted' - :type database_state: str or ~azure.mgmt.sql.models.DatabaseState5 + :type database_state: str or ~azure.mgmt.sql.models.DatabaseState :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response @@ -813,7 +813,7 @@ def list_by_resource_group_location( :param database_state: Whether to query against just live databases, just deleted databases, or all databases. Possible values include: 'All', 'Live', 'Deleted' - :type database_state: str or ~azure.mgmt.sql.models.DatabaseState6 + :type database_state: str or ~azure.mgmt.sql.models.DatabaseState :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response diff --git a/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/_backup_long_term_retention_policies_operations.py b/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/_long_term_retention_policies_operations.py similarity index 75% rename from sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/_backup_long_term_retention_policies_operations.py rename to sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/_long_term_retention_policies_operations.py index 532ce23b9aef..b4f0d27a5a1c 100644 --- a/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/_backup_long_term_retention_policies_operations.py +++ b/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/_long_term_retention_policies_operations.py @@ -18,8 +18,8 @@ from .. import models -class BackupLongTermRetentionPoliciesOperations(object): - """BackupLongTermRetentionPoliciesOperations operations. +class LongTermRetentionPoliciesOperations(object): + """LongTermRetentionPoliciesOperations operations. You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. @@ -28,7 +28,7 @@ class BackupLongTermRetentionPoliciesOperations(object): :param serializer: An object model serializer. :param deserializer: An object model deserializer. :ivar policy_name: The policy name. Should always be Default. Constant value: "default". - :ivar api_version: The API version to use for the request. Constant value: "2017-03-01-preview". + :ivar api_version: The API version to use for the request. Constant value: "2020-11-01-preview". 
""" models = models @@ -39,7 +39,7 @@ def __init__(self, client, config, serializer, deserializer): self._serialize = serializer self._deserialize = deserializer self.policy_name = "default" - self.api_version = "2017-03-01-preview" + self.api_version = "2020-11-01-preview" self.config = config @@ -60,9 +60,8 @@ def get( deserialized response :param operation_config: :ref:`Operation configuration overrides`. - :return: BackupLongTermRetentionPolicy or ClientRawResponse if - raw=true - :rtype: ~azure.mgmt.sql.models.BackupLongTermRetentionPolicy or + :return: LongTermRetentionPolicy or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.sql.models.LongTermRetentionPolicy or ~msrest.pipeline.ClientRawResponse :raises: :class:`CloudError` """ @@ -102,7 +101,7 @@ def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize('BackupLongTermRetentionPolicy', response) + deserialized = self._deserialize('LongTermRetentionPolicy', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) @@ -141,7 +140,7 @@ def _create_or_update_initial( header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct body - body_content = self._serialize.body(parameters, 'BackupLongTermRetentionPolicy') + body_content = self._serialize.body(parameters, 'LongTermRetentionPolicy') # Construct and send request request = self._client.put(url, query_parameters, header_parameters, body_content) @@ -155,7 +154,7 @@ def _create_or_update_initial( deserialized = None if response.status_code == 200: - deserialized = self._deserialize('BackupLongTermRetentionPolicy', response) + deserialized = self._deserialize('LongTermRetentionPolicy', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) @@ -176,19 +175,18 @@ def create_or_update( :param database_name: The name of the database. :type database_name: str :param parameters: The long term retention policy info. 
- :type parameters: ~azure.mgmt.sql.models.BackupLongTermRetentionPolicy + :type parameters: ~azure.mgmt.sql.models.LongTermRetentionPolicy :param dict custom_headers: headers that will be added to the request :param bool raw: The poller return type is ClientRawResponse, the direct response alongside the deserialized response :param polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy - :return: An instance of LROPoller that returns - BackupLongTermRetentionPolicy or - ClientRawResponse if raw==True + :return: An instance of LROPoller that returns LongTermRetentionPolicy + or ClientRawResponse if raw==True :rtype: - ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.sql.models.BackupLongTermRetentionPolicy] + ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.sql.models.LongTermRetentionPolicy] or - ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.sql.models.BackupLongTermRetentionPolicy]] + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.sql.models.LongTermRetentionPolicy]] :raises: :class:`CloudError` """ raw_result = self._create_or_update_initial( @@ -202,7 +200,7 @@ def create_or_update( ) def get_long_running_output(response): - deserialized = self._deserialize('BackupLongTermRetentionPolicy', response) + deserialized = self._deserialize('LongTermRetentionPolicy', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) @@ -236,52 +234,62 @@ def list_by_database( deserialized response :param operation_config: :ref:`Operation configuration overrides`. - :return: BackupLongTermRetentionPolicy or ClientRawResponse if - raw=true - :rtype: ~azure.mgmt.sql.models.BackupLongTermRetentionPolicy or - ~msrest.pipeline.ClientRawResponse + :return: An iterator like instance of LongTermRetentionPolicy + :rtype: + ~azure.mgmt.sql.models.LongTermRetentionPolicyPaged[~azure.mgmt.sql.models.LongTermRetentionPolicy] :raises: :class:`CloudError` """ - # Construct URL - url = self.list_by_database.metadata['url'] - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'serverName': self._serialize.url("server_name", server_name, 'str'), - 'databaseName': self._serialize.url("database_name", database_name, 'str'), - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') - - # Construct headers - header_parameters = {} - header_parameters['Accept'] = 'application/json' - if self.config.generate_client_request_id: - header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) - if custom_headers: - header_parameters.update(custom_headers) - if self.config.accept_language is not None: - header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') - - # Construct and send request - request = self._client.get(url, query_parameters, header_parameters) - response = self._client.send(request, stream=False, **operation_config) - - if response.status_code not in [200]: - exp = CloudError(response) - exp.request_id = response.headers.get('x-ms-request-id') - raise exp - - deserialized = None - if response.status_code == 
200: - deserialized = self._deserialize('BackupLongTermRetentionPolicy', response) - + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list_by_database.metadata['url'] + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'serverName': self._serialize.url("server_name", server_name, 'str'), + 'databaseName': self._serialize.url("database_name", database_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + header_dict = None if raw: - client_raw_response = ClientRawResponse(deserialized, response) - return client_raw_response + header_dict = {} + deserialized = models.LongTermRetentionPolicyPaged(internal_paging, self._deserialize.dependencies, header_dict) return deserialized list_by_database.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/backupLongTermRetentionPolicies'} diff --git a/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/_private_endpoint_connections_operations.py b/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/_private_endpoint_connections_operations.py index ee40d48d7f2a..9f04fac263d4 100644 --- a/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/_private_endpoint_connections_operations.py +++ b/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/_private_endpoint_connections_operations.py @@ -27,7 +27,7 @@ class PrivateEndpointConnectionsOperations(object): :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. - :ivar api_version: The API version to use for the request. Constant value: "2018-06-01-preview". + :ivar api_version: The API version to use for the request. Constant value: "2020-11-01-preview". 
""" models = models @@ -37,7 +37,7 @@ def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer - self.api_version = "2018-06-01-preview" + self.api_version = "2020-11-01-preview" self.config = config diff --git a/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/_servers_operations.py b/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/_servers_operations.py index 7abf430deda2..ebba98c1f7bf 100644 --- a/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/_servers_operations.py +++ b/sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/_servers_operations.py @@ -27,7 +27,6 @@ class ServersOperations(object): :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. - :ivar api_version: The API version to use for the request. Constant value: "2019-06-01-preview". """ models = models @@ -37,7 +36,6 @@ def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer - self.api_version = "2019-06-01-preview" self.config = config @@ -59,6 +57,8 @@ def list_by_resource_group( ~azure.mgmt.sql.models.ServerPaged[~azure.mgmt.sql.models.Server] :raises: :class:`CloudError` """ + api_version = "2019-06-01-preview" + def prepare_request(next_link=None): if not next_link: # Construct URL @@ -71,7 +71,7 @@ def prepare_request(next_link=None): # Construct parameters query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') else: url = next_link @@ -132,6 +132,8 @@ def get( ~msrest.pipeline.ClientRawResponse :raises: :class:`CloudError` """ + api_version = "2019-06-01-preview" + # Construct URL url = self.get.metadata['url'] path_format_arguments = { @@ -143,7 +145,7 @@ def get( # Construct parameters query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} @@ -178,6 +180,8 @@ def get( def _create_or_update_initial( self, resource_group_name, server_name, parameters, custom_headers=None, raw=False, **operation_config): + api_version = "2019-06-01-preview" + # Construct URL url = self.create_or_update.metadata['url'] path_format_arguments = { @@ -189,7 +193,7 @@ def _create_or_update_initial( # Construct parameters query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} @@ -282,6 +286,8 @@ def get_long_running_output(response): def _delete_initial( self, resource_group_name, server_name, custom_headers=None, raw=False, **operation_config): + api_version = "2019-06-01-preview" + # Construct URL url = self.delete.metadata['url'] path_format_arguments = { @@ -293,7 +299,7 @@ def _delete_initial( # Construct parameters query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} @@ -363,6 +369,8 @@ def 
get_long_running_output(response): def _update_initial( self, resource_group_name, server_name, parameters, custom_headers=None, raw=False, **operation_config): + api_version = "2019-06-01-preview" + # Construct URL url = self.update.metadata['url'] path_format_arguments = { @@ -374,7 +382,7 @@ def _update_initial( # Construct parameters query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} @@ -476,6 +484,8 @@ def list( ~azure.mgmt.sql.models.ServerPaged[~azure.mgmt.sql.models.Server] :raises: :class:`CloudError` """ + api_version = "2019-06-01-preview" + def prepare_request(next_link=None): if not next_link: # Construct URL @@ -487,7 +497,7 @@ def prepare_request(next_link=None): # Construct parameters query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') else: url = next_link @@ -547,6 +557,8 @@ def check_name_availability( """ parameters = models.CheckNameAvailabilityRequest(name=name) + api_version = "2019-06-01-preview" + # Construct URL url = self.check_name_availability.metadata['url'] path_format_arguments = { @@ -556,7 +568,7 @@ def check_name_availability( # Construct parameters query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} @@ -591,3 +603,108 @@ def check_name_availability( return deserialized check_name_availability.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Sql/checkNameAvailability'} + + + def _import_database_initial( + self, resource_group_name, server_name, parameters, custom_headers=None, raw=False, **operation_config): + api_version = "2020-02-02-preview" + + # Construct URL + url = self.import_database.metadata['url'] + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'serverName': self._serialize.url("server_name", server_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(parameters, 'ImportNewDatabaseDefinition') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 
202]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('ImportExportOperationResult', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def import_database( + self, resource_group_name, server_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config): + """Imports a bacpac into a new database. + + :param resource_group_name: The name of the resource group that + contains the resource. You can obtain this value from the Azure + Resource Manager API or the portal. + :type resource_group_name: str + :param server_name: The name of the server. + :type server_name: str + :param parameters: The database import request parameters. + :type parameters: ~azure.mgmt.sql.models.ImportNewDatabaseDefinition + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns + ImportExportOperationResult or + ClientRawResponse if raw==True + :rtype: + ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.sql.models.ImportExportOperationResult] + or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.sql.models.ImportExportOperationResult]] + :raises: :class:`CloudError` + """ + raw_result = self._import_database_initial( + resource_group_name=resource_group_name, + server_name=server_name, + parameters=parameters, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + deserialized = self._deserialize('ImportExportOperationResult', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + import_database.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/import'}
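
For reviewers, a minimal usage sketch of the operations this patch renames or adds. It is illustrative only and not part of the generated code: it assumes the track-1 (msrest-based) azure-mgmt-sql client produced from these files, that the operation-group attribute names (`long_term_retention_backups`, `servers`) follow the usual snake_case of the operation classes, and placeholder resource names ("eastus", "myserver", "mydb", "mybackup", "myrg"); credentials and the ImportNewDatabaseDefinition instance are supplied by the caller rather than constructed here.

# Illustrative sketch only -- not part of the generated SDK in this patch.
# Assumes a track-1 (msrest-based) azure-mgmt-sql client and placeholder
# resource names; credentials and the import definition are built elsewhere.
from azure.mgmt.sql import SqlManagementClient


def demo(credentials, subscription_id, import_definition):
    client = SqlManagementClient(credentials, subscription_id)

    # Subscription-scoped listing: the renamed list_by_database no longer
    # takes a resource group (see LongTermRetentionBackupsOperations above).
    for backup in client.long_term_retention_backups.list_by_database(
            location_name="eastus",
            long_term_retention_server_name="myserver",
            long_term_retention_database_name="mydb",
            only_latest_per_database=True):
        print(backup.name)

    # update(...) is a long-running operation; the poller resolves to a
    # LongTermRetentionBackupOperationResult.
    poller = client.long_term_retention_backups.update(
        location_name="eastus",
        long_term_retention_server_name="myserver",
        long_term_retention_database_name="mydb",
        backup_name="mybackup",
        requested_backup_storage_redundancy="Zone")  # 'Geo', 'Local' or 'Zone'
    result = poller.result()

    # import_database(...) posts an ImportNewDatabaseDefinition and likewise
    # returns an LROPoller whose result is an ImportExportOperationResult.
    import_poller = client.servers.import_database(
        resource_group_name="myrg",
        server_name="myserver",
        parameters=import_definition)
    print(import_poller.result())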