diff --git a/src/storageimportexport/HISTORY.rst b/src/storageimportexport/HISTORY.rst new file mode 100644 index 00000000000..1c139576ba0 --- /dev/null +++ b/src/storageimportexport/HISTORY.rst @@ -0,0 +1,8 @@ +.. :changelog: + +Release History +=============== + +0.1.0 +++++++ +* Initial release. diff --git a/src/storageimportexport/README.md b/src/storageimportexport/README.md new file mode 100644 index 00000000000..d41718160d0 --- /dev/null +++ b/src/storageimportexport/README.md @@ -0,0 +1,71 @@ +# Azure CLI storageimportexport Extension # +This is the extension for storageimportexport + +### How to use ### +Install this extension using the below CLI command +``` +az extension add --name storageimportexport +``` + +### Included Features ### +#### storageimportexport location #### +##### List ##### +``` +az storageimportexport location list +``` +##### Show ##### +``` +az storageimportexport location show --name "West US" +``` +#### storageimportexport job #### +##### Create ##### +``` +az storageimportexport job create --location "West US" --backup-drive-manifest true \ + --diagnostics-path "waimportexport" --export blob-path-prefix="/" --job-type "Export" --log-level "Verbose" \ + --return-address city="Redmond" country-or-region="USA" email="Test@contoso.com" phone="4250000000" postal-code="98007" recipient-name="Test" state-or-province="wa" street-address1="Street1" street-address2="street2" \ + --return-shipping carrier-account-number="989ffff" carrier-name="FedEx" \ + --storage-account-id "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.ClassicStorage/storageAccounts/test" \ + --name "myExportJob" --resource-group "myResourceGroup" +``` +##### Create ##### +``` +az storageimportexport job create --location "West US" --backup-drive-manifest true \ + --diagnostics-path "waimportexport" \ + --drive-list bit-locker-key="238810-662376-448998-450120-652806-203390-606320-483076" 
drive-header-hash="0:1048576:FB6B6ED500D49DA6E0D723C98D42C657F2881CC13357C28DCECA6A524F1292501571A321238540E621AB5BD9C9A32637615919A75593E6CB5C1515DAE341CABF;135266304:143360:C957A189AFC38C4E80731252301EB91427CE55E61448FA3C73C6FDDE70ABBC197947EC8D0249A2C639BB10B95957D5820A4BE8DFBBF76FFFA688AE5CE0D42EC3" drive-id="9CA995BB" manifest-file="\\\\8a0c23f7-14b7-470a-9633-fcd46590a1bc.manifest" manifest-hash="4228EC5D8E048CB9B515338C789314BE8D0B2FDBC7C7A0308E1C826242CDE74E" \ + --job-type "Import" --log-level "Verbose" \ + --return-address city="Redmond" country-or-region="USA" email="Test@contoso.com" phone="4250000000" postal-code="98007" recipient-name="Test" state-or-province="wa" street-address1="Street1" street-address2="street2" \ + --return-shipping carrier-account-number="989ffff" carrier-name="FedEx" \ + --storage-account-id "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.ClassicStorage/storageAccounts/test" \ + --name "myJob" --resource-group "myResourceGroup" +``` +##### Show ##### +``` +az storageimportexport job show --name "myJob" --resource-group "myResourceGroup" +``` +##### List ##### +``` +az storageimportexport job list --resource-group "myResourceGroup" +``` +##### Show ##### +``` +az storageimportexport job show --name "myJob" --resource-group "myResourceGroup" +``` +##### Update ##### +``` +az storageimportexport job update --backup-drive-manifest true --log-level "Verbose" --state "" --name "myExportJob" \ + --resource-group "myResourceGroup" +``` +##### Update ##### +``` +az storageimportexport job update --backup-drive-manifest true --log-level "Verbose" --state "" --name "myJob" \ + --resource-group "myResourceGroup" +``` +##### Delete ##### +``` +az storageimportexport job delete --name "myJob" --resource-group "myResourceGroup" +``` +#### storageimportexport bit-locker-key #### +##### List ##### +``` +az storageimportexport bit-locker-key list --job-name "myJob" --resource-group 
"myResourceGroup" +``` \ No newline at end of file diff --git a/src/storageimportexport/azext_storageimportexport/__init__.py b/src/storageimportexport/azext_storageimportexport/__init__.py new file mode 100644 index 00000000000..7c556bd22c4 --- /dev/null +++ b/src/storageimportexport/azext_storageimportexport/__init__.py @@ -0,0 +1,50 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from azure.cli.core import AzCommandsLoader +from azext_storageimportexport.generated._help import helps # pylint: disable=unused-import +try: + from azext_storageimportexport.manual._help import helps # pylint: disable=reimported +except ImportError: + pass + + +class StorageImportExportCommandsLoader(AzCommandsLoader): + + def __init__(self, cli_ctx=None): + from azure.cli.core.commands import CliCommandType + from azext_storageimportexport.generated._client_factory import cf_storageimportexport_cl + storageimportexport_custom = CliCommandType( + operations_tmpl='azext_storageimportexport.custom#{}', + client_factory=cf_storageimportexport_cl) + parent = super(StorageImportExportCommandsLoader, self) + parent.__init__(cli_ctx=cli_ctx, custom_command_type=storageimportexport_custom) + + def load_command_table(self, args): + from azext_storageimportexport.generated.commands import load_command_table + load_command_table(self, args) + try: + from azext_storageimportexport.manual.commands import load_command_table as load_command_table_manual + load_command_table_manual(self, args) + except ImportError: + pass + return self.command_table + + def load_arguments(self, 
command): + from azext_storageimportexport.generated._params import load_arguments + load_arguments(self, command) + try: + from azext_storageimportexport.manual._params import load_arguments as load_arguments_manual + load_arguments_manual(self, command) + except ImportError: + pass + + +COMMAND_LOADER_CLS = StorageImportExportCommandsLoader diff --git a/src/storageimportexport/azext_storageimportexport/action.py b/src/storageimportexport/azext_storageimportexport/action.py new file mode 100644 index 00000000000..d95d53bf711 --- /dev/null +++ b/src/storageimportexport/azext_storageimportexport/action.py @@ -0,0 +1,17 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- +# pylint: disable=wildcard-import +# pylint: disable=unused-wildcard-import + +from .generated.action import * # noqa: F403 +try: + from .manual.action import * # noqa: F403 +except ImportError: + pass diff --git a/src/storageimportexport/azext_storageimportexport/azext_metadata.json b/src/storageimportexport/azext_storageimportexport/azext_metadata.json new file mode 100644 index 00000000000..cfc30c747c7 --- /dev/null +++ b/src/storageimportexport/azext_storageimportexport/azext_metadata.json @@ -0,0 +1,4 @@ +{ + "azext.isExperimental": true, + "azext.minCliCoreVersion": "2.15.0" +} \ No newline at end of file diff --git a/src/storageimportexport/azext_storageimportexport/custom.py b/src/storageimportexport/azext_storageimportexport/custom.py new file mode 100644 index 00000000000..dbe9d5f9742 --- /dev/null +++ b/src/storageimportexport/azext_storageimportexport/custom.py @@ -0,0 +1,17 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- +# pylint: disable=wildcard-import +# pylint: disable=unused-wildcard-import + +from .generated.custom import * # noqa: F403 +try: + from .manual.custom import * # noqa: F403 +except ImportError: + pass diff --git a/src/storageimportexport/azext_storageimportexport/generated/__init__.py b/src/storageimportexport/azext_storageimportexport/generated/__init__.py new file mode 100644 index 00000000000..c9cfdc73e77 --- /dev/null +++ b/src/storageimportexport/azext_storageimportexport/generated/__init__.py @@ -0,0 +1,12 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/src/storageimportexport/azext_storageimportexport/generated/_client_factory.py b/src/storageimportexport/azext_storageimportexport/generated/_client_factory.py new file mode 100644 index 00000000000..41ced3700a6 --- /dev/null +++ b/src/storageimportexport/azext_storageimportexport/generated/_client_factory.py @@ -0,0 +1,28 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + + +def cf_storageimportexport_cl(cli_ctx, *_): + from azure.cli.core.commands.client_factory import get_mgmt_service_client + from azext_storageimportexport.vendored_sdks.storageimportexport import StorageImportExport + return get_mgmt_service_client(cli_ctx, + StorageImportExport) + + +def cf_location(cli_ctx, *_): + return cf_storageimportexport_cl(cli_ctx).locations + + +def cf_job(cli_ctx, *_): + return cf_storageimportexport_cl(cli_ctx).jobs + + +def cf_bit_locker_key(cli_ctx, *_): + return cf_storageimportexport_cl(cli_ctx).bit_locker_keys diff --git a/src/storageimportexport/azext_storageimportexport/generated/_help.py b/src/storageimportexport/azext_storageimportexport/generated/_help.py new file mode 100644 index 00000000000..dc7c0b0bf96 --- /dev/null +++ b/src/storageimportexport/azext_storageimportexport/generated/_help.py @@ -0,0 +1,290 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=too-many-lines + +from knack.help_files import helps + + +helps['storageimportexport location'] = """ + type: group + short-summary: Manage location with storageimportexport +""" + +helps['storageimportexport location list'] = """ + type: command + short-summary: "Returns a list of locations to which you can ship the disks associated with an import or export \ +job. A location is a Microsoft data center region." 
+ examples: + - name: List locations + text: |- + az storageimportexport location list +""" + +helps['storageimportexport location show'] = """ + type: command + short-summary: "Returns the details about a location to which you can ship the disks associated with an import or \ +export job. A location is an Azure region." + examples: + - name: Get locations + text: |- + az storageimportexport location show --name "West US" +""" + +helps['storageimportexport job'] = """ + type: group + short-summary: Manage job with storageimportexport +""" + +helps['storageimportexport job list'] = """ + type: command + short-summary: "Returns all active and completed jobs in a resource group. And Returns all active and completed \ +jobs in a subscription." + examples: + - name: List jobs in a resource group + text: |- + az storageimportexport job list --resource-group "myResourceGroup" + - name: List jobs in a subscription + text: |- + az storageimportexport job list +""" + +helps['storageimportexport job show'] = """ + type: command + short-summary: "Gets information about an existing job." + examples: + - name: Get export job + text: |- + az storageimportexport job show --name "myJob" --resource-group "myResourceGroup" + - name: Get import job + text: |- + az storageimportexport job show --name "myJob" --resource-group "myResourceGroup" +""" + +helps['storageimportexport job create'] = """ + type: command + short-summary: "Creates a new job or updates an existing job in the specified subscription." + parameters: + - name: --return-address + short-summary: "Specifies the return address information for the job." + long-summary: | + Usage: --return-address recipient-name=XX street-address1=XX street-address2=XX city=XX \ +state-or-province=XX postal-code=XX country-or-region=XX phone=XX email=XX + + recipient-name: Required. The name of the recipient who will receive the hard drives when they are \ +returned. + street-address1: Required. 
The first line of the street address to use when returning the drives. + street-address2: The second line of the street address to use when returning the drives. + city: Required. The city name to use when returning the drives. + state-or-province: The state or province to use when returning the drives. + postal-code: Required. The postal code to use when returning the drives. + country-or-region: Required. The country or region to use when returning the drives. + phone: Required. Phone number of the recipient of the returned drives. + email: Required. Email address of the recipient of the returned drives. + - name: --return-shipping + short-summary: "Specifies the return carrier and customer's account with the carrier." + long-summary: | + Usage: --return-shipping carrier-name=XX carrier-account-number=XX + + carrier-name: Required. The carrier's name. + carrier-account-number: Required. The customer's account number with the carrier. + - name: --shipping-information + short-summary: "Contains information about the Microsoft datacenter to which the drives should be shipped." + long-summary: | + Usage: --shipping-information recipient-name=XX street-address1=XX street-address2=XX city=XX \ +state-or-province=XX postal-code=XX country-or-region=XX phone=XX + + recipient-name: The name of the recipient who will receive the hard drives when they are returned. + street-address1: The first line of the street address to use when returning the drives. + street-address2: The second line of the street address to use when returning the drives. + city: The city name to use when returning the drives. + state-or-province: The state or province to use when returning the drives. + postal-code: The postal code to use when returning the drives. + country-or-region: The country or region to use when returning the drives. + phone: Phone number of the recipient of the returned drives. 
+ - name: --delivery-package + short-summary: "Contains information about the package being shipped by the customer to the Microsoft data \ +center." + long-summary: | + Usage: --delivery-package carrier-name=XX tracking-number=XX drive-count=XX ship-date=XX + + carrier-name: Required. The name of the carrier that is used to ship the import or export drives. + tracking-number: Required. The tracking number of the package. + drive-count: The number of drives included in the package. + ship-date: The date when the package is shipped. + - name: --return-package + short-summary: "Contains information about the package being shipped from the Microsoft data center to the \ +customer to return the drives. The format is the same as the deliveryPackage property above. This property is not \ +included if the drives have not yet been returned." + long-summary: | + Usage: --return-package carrier-name=XX tracking-number=XX drive-count=XX ship-date=XX + + carrier-name: Required. The name of the carrier that is used to ship the import or export drives. + tracking-number: Required. The tracking number of the package. + drive-count: Required. The number of drives included in the package. + ship-date: Required. The date when the package is shipped. + - name: --drive-list + short-summary: "List of up to ten drives that comprise the job. The drive list is a required element for an \ +import job; it is not specified for export jobs." + long-summary: | + Usage: --drive-list drive-id=XX bit-locker-key=XX manifest-file=XX manifest-hash=XX drive-header-hash=XX \ +state=XX copy-status=XX percent-complete=XX verbose-log-uri=XX error-log-uri=XX manifest-uri=XX bytes-succeeded=XX + + drive-id: The drive's hardware serial number, without spaces. + bit-locker-key: The BitLocker key used to encrypt the drive. + manifest-file: The relative path of the manifest file on the drive. + manifest-hash: The Base16-encoded MD5 hash of the manifest file on the drive. 
+ drive-header-hash: The drive header hash value. + state: The drive's current state. + copy-status: Detailed status about the data transfer process. This field is not returned in the response \ +until the drive is in the Transferring state. + percent-complete: Percentage completed for the drive. + verbose-log-uri: A URI that points to the blob containing the verbose log for the data transfer operation. + error-log-uri: A URI that points to the blob containing the error log for the data transfer operation. + manifest-uri: A URI that points to the blob containing the drive manifest file. + bytes-succeeded: Bytes successfully transferred for the drive. + + Multiple actions can be specified by using more than one --drive-list argument. + - name: --export + short-summary: "A property containing information about the blobs to be exported for an export job. This \ +property is included for export jobs only." + long-summary: | + Usage: --export blob-list-blob-path=XX blob-path=XX blob-path-prefix=XX + + blob-list-blob-path: The relative URI to the block blob that contains the list of blob paths or blob path \ +prefixes as defined above, beginning with the container name. If the blob is in root container, the URI must begin \ +with $root. + blob-path: A collection of blob-path strings. + blob-path-prefix: A collection of blob-prefix strings. + - name: --encryption-key + short-summary: "Contains information about the encryption key." + long-summary: | + Usage: --encryption-key kek-type=XX kek-url=XX kek-vault-resource-id=XX + + kek-type: The type of kek encryption key + kek-url: Specifies the url for kek encryption key. + kek-vault-resource-id: Specifies the keyvault resource id for kek encryption key. 
+ examples: + - name: Create export job + text: |- + az storageimportexport job create --location "West US" --backup-drive-manifest true --diagnostics-path \ +"waimportexport" --export blob-path-prefix="/" --job-type "Export" --log-level "Verbose" --return-address \ +city="Redmond" country-or-region="USA" email="Test@contoso.com" phone="4250000000" postal-code="98007" \ +recipient-name="Test" state-or-province="wa" street-address1="Street1" street-address2="street2" --return-shipping \ +carrier-account-number="989ffff" carrier-name="FedEx" --storage-account-id "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxx\ +xxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.ClassicStorage/storageAccounts/test" --name "myExportJob" \ +--resource-group "myResourceGroup" + - name: Create import job + text: |- + az storageimportexport job create --location "West US" --backup-drive-manifest true --diagnostics-path \ +"waimportexport" --drive-list bit-locker-key="238810-662376-448998-450120-652806-203390-606320-483076" \ +drive-header-hash="0:1048576:FB6B6ED500D49DA6E0D723C98D42C657F2881CC13357C28DCECA6A524F1292501571A321238540E621AB5BD9C9\ +A32637615919A75593E6CB5C1515DAE341CABF;135266304:143360:C957A189AFC38C4E80731252301EB91427CE55E61448FA3C73C6FDDE70ABBC1\ +97947EC8D0249A2C639BB10B95957D5820A4BE8DFBBF76FFFA688AE5CE0D42EC3" drive-id="9CA995BB" manifest-file="\\\\8a0c23f7-14b7\ +-470a-9633-fcd46590a1bc.manifest" manifest-hash="4228EC5D8E048CB9B515338C789314BE8D0B2FDBC7C7A0308E1C826242CDE74E" \ +--job-type "Import" --log-level "Verbose" --return-address city="Redmond" country-or-region="USA" \ +email="Test@contoso.com" phone="4250000000" postal-code="98007" recipient-name="Test" state-or-province="wa" \ +street-address1="Street1" street-address2="street2" --return-shipping carrier-account-number="989ffff" \ +carrier-name="FedEx" --storage-account-id "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourc\ 
+eGroup/providers/Microsoft.ClassicStorage/storageAccounts/test" --name "myJob" --resource-group "myResourceGroup" +""" + +helps['storageimportexport job update'] = """ + type: command + short-summary: "Updates specific properties of a job. You can call this operation to notify the Import/Export \ +service that the hard drives comprising the import or export job have been shipped to the Microsoft data center. It \ +can also be used to cancel an existing job." + parameters: + - name: --return-address + short-summary: "Specifies the return address information for the job." + long-summary: | + Usage: --return-address recipient-name=XX street-address1=XX street-address2=XX city=XX \ +state-or-province=XX postal-code=XX country-or-region=XX phone=XX email=XX + + recipient-name: Required. The name of the recipient who will receive the hard drives when they are \ +returned. + street-address1: Required. The first line of the street address to use when returning the drives. + street-address2: The second line of the street address to use when returning the drives. + city: Required. The city name to use when returning the drives. + state-or-province: The state or province to use when returning the drives. + postal-code: Required. The postal code to use when returning the drives. + country-or-region: Required. The country or region to use when returning the drives. + phone: Required. Phone number of the recipient of the returned drives. + email: Required. Email address of the recipient of the returned drives. + - name: --return-shipping + short-summary: "Specifies the return carrier and customer's account with the carrier." + long-summary: | + Usage: --return-shipping carrier-name=XX carrier-account-number=XX + + carrier-name: Required. The carrier's name. + carrier-account-number: Required. The customer's account number with the carrier. 
+ - name: --delivery-package + short-summary: "Contains information about the package being shipped by the customer to the Microsoft data \ +center." + long-summary: | + Usage: --delivery-package carrier-name=XX tracking-number=XX drive-count=XX ship-date=XX + + carrier-name: Required. The name of the carrier that is used to ship the import or export drives. + tracking-number: Required. The tracking number of the package. + drive-count: The number of drives included in the package. + ship-date: The date when the package is shipped. + - name: --drive-list + short-summary: "List of drives that comprise the job." + long-summary: | + Usage: --drive-list drive-id=XX bit-locker-key=XX manifest-file=XX manifest-hash=XX drive-header-hash=XX \ +state=XX copy-status=XX percent-complete=XX verbose-log-uri=XX error-log-uri=XX manifest-uri=XX bytes-succeeded=XX + + drive-id: The drive's hardware serial number, without spaces. + bit-locker-key: The BitLocker key used to encrypt the drive. + manifest-file: The relative path of the manifest file on the drive. + manifest-hash: The Base16-encoded MD5 hash of the manifest file on the drive. + drive-header-hash: The drive header hash value. + state: The drive's current state. + copy-status: Detailed status about the data transfer process. This field is not returned in the response \ +until the drive is in the Transferring state. + percent-complete: Percentage completed for the drive. + verbose-log-uri: A URI that points to the blob containing the verbose log for the data transfer operation. + error-log-uri: A URI that points to the blob containing the error log for the data transfer operation. + manifest-uri: A URI that points to the blob containing the drive manifest file. + bytes-succeeded: Bytes successfully transferred for the drive. + + Multiple actions can be specified by using more than one --drive-list argument. 
+ examples: + - name: Update export job + text: |- + az storageimportexport job update --backup-drive-manifest true --log-level "Verbose" --state "" --name \ +"myExportJob" --resource-group "myResourceGroup" + - name: Update import job + text: |- + az storageimportexport job update --backup-drive-manifest true --log-level "Verbose" --state "" --name \ +"myJob" --resource-group "myResourceGroup" +""" + +helps['storageimportexport job delete'] = """ + type: command + short-summary: "Deletes an existing job. Only jobs in the Creating or Completed states can be deleted." + examples: + - name: Delete job + text: |- + az storageimportexport job delete --name "myJob" --resource-group "myResourceGroup" +""" + +helps['storageimportexport bit-locker-key'] = """ + type: group + short-summary: Manage bit locker key with storageimportexport +""" + +helps['storageimportexport bit-locker-key list'] = """ + type: command + short-summary: "Returns the BitLocker Keys for all drives in the specified job." + examples: + - name: List BitLocker Keys for drives in a job + text: |- + az storageimportexport bit-locker-key list --job-name "myJob" --resource-group "myResourceGroup" +""" diff --git a/src/storageimportexport/azext_storageimportexport/generated/_params.py b/src/storageimportexport/azext_storageimportexport/generated/_params.py new file mode 100644 index 00000000000..0a63cec38d1 --- /dev/null +++ b/src/storageimportexport/azext_storageimportexport/generated/_params.py @@ -0,0 +1,126 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- +# pylint: disable=too-many-lines +# pylint: disable=too-many-statements + +from azure.cli.core.commands.parameters import ( + tags_type, + get_three_state_flag, + resource_group_name_type, + get_location_type +) +from azure.cli.core.commands.validators import get_default_location_from_resource_group +from azext_storageimportexport.action import ( + AddReturnAddress, + AddReturnShipping, + AddShippingInformation, + AddDeliveryPackage, + AddReturnPackage, + AddStorageimportexportJobCreateDriveList, + AddExport, + AddEncryptionKey, + AddStorageimportexportJobUpdateDriveList +) + + +def load_arguments(self, _): + + with self.argument_context('storageimportexport location show') as c: + c.argument('location_name', options_list=['--name', '-n', '--location-name'], type=str, help='The name of the ' + 'location. For example, West US or westus.') + + with self.argument_context('storageimportexport job list') as c: + c.argument('top', type=int, help='An integer value that specifies how many jobs at most should be returned. 
' + 'The value cannot exceed 100.') + c.argument('filter_', options_list=['--filter'], type=str, help='Can be used to restrict the results to ' + 'certain conditions.') + c.argument('resource_group_name', resource_group_name_type) + + with self.argument_context('storageimportexport job show') as c: + c.argument('job_name', options_list=['--name', '-n', '--job-name'], type=str, help='The name of the ' + 'import/export job.', id_part='name') + c.argument('resource_group_name', resource_group_name_type) + + with self.argument_context('storageimportexport job create') as c: + c.argument('job_name', options_list=['--name', '-n', '--job-name'], type=str, help='The name of the ' + 'import/export job.') + c.argument('resource_group_name', resource_group_name_type) + c.argument('client_tenant_id', type=str, help='The tenant ID of the client making the request.') + c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False, + validator=get_default_location_from_resource_group) + c.argument('tags', tags_type) + c.argument('storage_account_id', type=str, help='The resource identifier of the storage account where data ' + 'will be imported to or exported from.') + c.argument('job_type', type=str, help='The type of job') + c.argument('return_address', action=AddReturnAddress, nargs='+', help='Specifies the return address ' + 'information for the job.') + c.argument('return_shipping', action=AddReturnShipping, nargs='+', help='Specifies the return carrier and ' + 'customer\'s account with the carrier.') + c.argument('shipping_information', action=AddShippingInformation, nargs='+', help='Contains information about ' + 'the Microsoft datacenter to which the drives should be shipped.') + c.argument('delivery_package', action=AddDeliveryPackage, nargs='+', help='Contains information about the ' + 'package being shipped by the customer to the Microsoft data center.') + c.argument('return_package', action=AddReturnPackage, nargs='+', help='Contains information 
about the package ' + 'being shipped from the Microsoft data center to the customer to return the drives. The format is ' + 'the same as the deliveryPackage property above. This property is not included if the drives have ' + 'not yet been returned.') + c.argument('diagnostics_path', type=str, help='The virtual blob directory to which the copy logs and backups ' + 'of drive manifest files (if enabled) will be stored.') + c.argument('log_level', type=str, help='Default value is Error. Indicates whether error logging or verbose ' + 'logging will be enabled.') + c.argument('backup_drive_manifest', arg_type=get_three_state_flag(), help='Default value is false. Indicates ' + 'whether the manifest files on the drives should be copied to block blobs.') + c.argument('state', type=str, help='Current state of the job.') + c.argument('cancel_requested', arg_type=get_three_state_flag(), help='Indicates whether a request has been ' + 'submitted to cancel the job.') + c.argument('percent_complete', type=int, help='Overall percentage completed for the job.') + c.argument('incomplete_blob_list_uri', type=str, help='A blob path that points to a block blob containing a ' + 'list of blob names that were not exported due to insufficient drive space. If all blobs were ' + 'exported successfully, then this element is not included in the response.') + c.argument('drive_list', action=AddStorageimportexportJobCreateDriveList, nargs='+', help='List of up to ten ' + 'drives that comprise the job. The drive list is a required element for an import job; it is not ' + 'specified for export jobs.') + c.argument('export', action=AddExport, nargs='+', help='A property containing information about the blobs to ' + 'be exported for an export job. 
This property is included for export jobs only.') + c.argument('provisioning_state', type=str, help='Specifies the provisioning state of the job.') + c.argument('encryption_key', action=AddEncryptionKey, nargs='+', help='Contains information about the ' + 'encryption key.') + + with self.argument_context('storageimportexport job update') as c: + c.argument('job_name', options_list=['--name', '-n', '--job-name'], type=str, help='The name of the ' + 'import/export job.', id_part='name') + c.argument('resource_group_name', resource_group_name_type) + c.argument('tags', tags_type) + c.argument('cancel_requested', arg_type=get_three_state_flag(), help='If specified, the value must be true. ' + 'The service will attempt to cancel the job.') + c.argument('state', type=str, help='If specified, the value must be Shipping, which tells the Import/Export ' + 'service that the package for the job has been shipped. The ReturnAddress and DeliveryPackage ' + 'properties must have been set either in this request or in a previous request, otherwise the ' + 'request will fail.') + c.argument('return_address', action=AddReturnAddress, nargs='+', help='Specifies the return address ' + 'information for the job.') + c.argument('return_shipping', action=AddReturnShipping, nargs='+', help='Specifies the return carrier and ' + 'customer\'s account with the carrier.') + c.argument('delivery_package', action=AddDeliveryPackage, nargs='+', help='Contains information about the ' + 'package being shipped by the customer to the Microsoft data center.') + c.argument('log_level', type=str, help='Indicates whether error logging or verbose logging is enabled.') + c.argument('backup_drive_manifest', arg_type=get_three_state_flag(), help='Indicates whether the manifest ' + 'files on the drives should be copied to block blobs.') + c.argument('drive_list', action=AddStorageimportexportJobUpdateDriveList, nargs='+', help='List of drives that ' + 'comprise the job.') + + with 
self.argument_context('storageimportexport job delete') as c: + c.argument('job_name', options_list=['--name', '-n', '--job-name'], type=str, help='The name of the ' + 'import/export job.', id_part='name') + c.argument('resource_group_name', resource_group_name_type) + + with self.argument_context('storageimportexport bit-locker-key list') as c: + c.argument('job_name', type=str, help='The name of the import/export job.') + c.argument('resource_group_name', resource_group_name_type) diff --git a/src/storageimportexport/azext_storageimportexport/generated/_validators.py b/src/storageimportexport/azext_storageimportexport/generated/_validators.py new file mode 100644 index 00000000000..b33a44c1ebf --- /dev/null +++ b/src/storageimportexport/azext_storageimportexport/generated/_validators.py @@ -0,0 +1,9 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- diff --git a/src/storageimportexport/azext_storageimportexport/generated/action.py b/src/storageimportexport/azext_storageimportexport/generated/action.py new file mode 100644 index 00000000000..7479525c406 --- /dev/null +++ b/src/storageimportexport/azext_storageimportexport/generated/action.py @@ -0,0 +1,342 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
# pylint: disable=protected-access

import argparse
from collections import defaultdict
from knack.util import CLIError


# Keys accepted by each complex argument, listed in the order they appear in
# usage errors.  Every CLI key maps to its snake_case SDK field name simply by
# replacing dashes with underscores, which _build_object relies on.
_RETURN_ADDRESS_KEYS = ('recipient-name', 'street-address1', 'street-address2', 'city', 'state-or-province',
                        'postal-code', 'country-or-region', 'phone', 'email')
# Shipping information accepts the same keys as the return address, minus email.
_SHIPPING_INFORMATION_KEYS = _RETURN_ADDRESS_KEYS[:-1]
_RETURN_SHIPPING_KEYS = ('carrier-name', 'carrier-account-number')
# Delivery and return packages share one schema.
_PACKAGE_KEYS = ('carrier-name', 'tracking-number', 'drive-count', 'ship-date')
_DRIVE_LIST_KEYS = ('drive-id', 'bit-locker-key', 'manifest-file', 'manifest-hash', 'drive-header-hash', 'state',
                    'copy-status', 'percent-complete', 'verbose-log-uri', 'error-log-uri', 'manifest-uri',
                    'bytes-succeeded')
_EXPORT_KEYS = ('blob-list-blob-path', 'blob-path', 'blob-path-prefix')
_ENCRYPTION_KEY_KEYS = ('kek-type', 'kek-url', 'kek-vault-resource-id')


def _parse_key_value_pairs(values, option_string):
    """Split repeated ``KEY=VALUE`` tokens into ``{key: [value, ...]}``.

    :param values: list of raw ``KEY=VALUE`` strings from argparse.
    :param option_string: the option flag, used only in the error message.
    :raises CLIError: when a token contains no ``=`` separator.
    """
    try:
        properties = defaultdict(list)
        for (k, v) in (x.split('=', 1) for x in values):
            properties[k].append(v)
        return dict(properties)
    except ValueError as ex:
        raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) from ex


def _build_object(values, option_string, parameter_name, allowed_keys, multi_value_keys=(), defaults=None):
    """Convert ``KEY=VALUE`` tokens into an SDK payload dictionary.

    Keys are matched case-insensitively against *allowed_keys* and converted to
    snake_case in the result.  Keys listed in *multi_value_keys* keep every
    supplied value as a list; all other keys keep only the first value, matching
    the generated per-class behavior this helper replaces.  *defaults*, if
    given, seeds the result before user-supplied values are applied.

    :raises CLIError: on malformed tokens or unsupported keys.
    """
    properties = _parse_key_value_pairs(values, option_string)
    d = dict(defaults) if defaults else {}
    for k, v in properties.items():
        kl = k.lower()
        if kl not in allowed_keys:
            raise CLIError('Unsupported Key {} is provided for parameter {}. All possible keys are: '
                           '{}'.format(k, parameter_name, ', '.join(allowed_keys)))
        d[kl.replace('-', '_')] = v if kl in multi_value_keys else v[0]
    return d


class AddReturnAddress(argparse.Action):
    """Parse --return-address KEY=VALUE pairs into a return-address dict."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.return_address = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        return _build_object(values, option_string, 'return_address', _RETURN_ADDRESS_KEYS)


class AddReturnShipping(argparse.Action):
    """Parse --return-shipping KEY=VALUE pairs into a return-shipping dict."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.return_shipping = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        return _build_object(values, option_string, 'return_shipping', _RETURN_SHIPPING_KEYS)


class AddShippingInformation(argparse.Action):
    """Parse --shipping-information KEY=VALUE pairs into a shipping-information dict."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.shipping_information = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        return _build_object(values, option_string, 'shipping_information', _SHIPPING_INFORMATION_KEYS)


class AddDeliveryPackage(argparse.Action):
    """Parse --delivery-package KEY=VALUE pairs into a delivery-package dict."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.delivery_package = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        return _build_object(values, option_string, 'delivery_package', _PACKAGE_KEYS)


class AddReturnPackage(argparse.Action):
    """Parse --return-package KEY=VALUE pairs into a return-package dict."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.return_package = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        return _build_object(values, option_string, 'return_package', _PACKAGE_KEYS)


class AddStorageimportexportJobCreateDriveList(argparse._AppendAction):
    """Parse each --drive-list occurrence into a drive dict; occurrences accumulate into a list."""

    def __call__(self, parser, namespace, values, option_string=None):
        action = self.get_action(values, option_string)
        # _AppendAction appends each parsed drive dict to the namespace list.
        super(AddStorageimportexportJobCreateDriveList, self).__call__(parser, namespace, action, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        return _build_object(values, option_string, 'drive_list', _DRIVE_LIST_KEYS)


class AddExport(argparse.Action):
    """Parse --export KEY=VALUE pairs into an export-specification dict."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.export = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        # blob-path and blob-path-prefix may be repeated; they stay lists.
        return _build_object(values, option_string, 'export', _EXPORT_KEYS,
                             multi_value_keys=('blob-path', 'blob-path-prefix'))


class AddEncryptionKey(argparse.Action):
    """Parse --encryption-key KEY=VALUE pairs into an encryption-key dict."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.encryption_key = self.get_action(values, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        # kek-type defaults to the service-managed key unless overridden.
        return _build_object(values, option_string, 'encryption_key', _ENCRYPTION_KEY_KEYS,
                             defaults={'kek_type': 'MicrosoftManaged'})


class AddStorageimportexportJobUpdateDriveList(argparse._AppendAction):
    """Parse each --drive-list occurrence (job update) into a drive dict; occurrences accumulate."""

    def __call__(self, parser, namespace, values, option_string=None):
        action = self.get_action(values, option_string)
        super(AddStorageimportexportJobUpdateDriveList, self).__call__(parser, namespace, action, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        return _build_object(values, option_string, 'drive_list', _DRIVE_LIST_KEYS)
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
# pylint: disable=too-many-statements
# pylint: disable=too-many-locals

from azure.cli.core.commands import CliCommandType


def load_command_table(self, _):
    """Register the storageimportexport location, job and bit-locker-key command groups."""
    from azext_storageimportexport.generated._client_factory import cf_location, cf_job, cf_bit_locker_key

    location_type = CliCommandType(
        operations_tmpl='azext_storageimportexport.vendored_sdks.storageimportexport.operations._locations_operations#L'
                        'ocationsOperations.{}',
        client_factory=cf_location)
    with self.command_group('storageimportexport location', location_type, client_factory=cf_location) as group:
        group.custom_command('list', 'storageimportexport_location_list')
        group.custom_show_command('show', 'storageimportexport_location_show')

    job_type = CliCommandType(
        operations_tmpl='azext_storageimportexport.vendored_sdks.storageimportexport.operations._jobs_operations#JobsOp'
                        'erations.{}',
        client_factory=cf_job)
    with self.command_group('storageimportexport job', job_type, client_factory=cf_job) as group:
        group.custom_command('list', 'storageimportexport_job_list')
        group.custom_show_command('show', 'storageimportexport_job_show')
        group.custom_command('create', 'storageimportexport_job_create')
        group.custom_command('update', 'storageimportexport_job_update')
        # Deletion is destructive, so the CLI prompts for confirmation.
        group.custom_command('delete', 'storageimportexport_job_delete', confirmation=True)

    bit_locker_key_type = CliCommandType(
        operations_tmpl='azext_storageimportexport.vendored_sdks.storageimportexport.operations._bit_locker_keys_operat'
                        'ions#BitLockerKeysOperations.{}',
        client_factory=cf_bit_locker_key)
    with self.command_group('storageimportexport bit-locker-key', bit_locker_key_type,
                            client_factory=cf_bit_locker_key) as group:
        group.custom_command('list', 'storageimportexport_bit_locker_key_list')

    # Mark the whole extension as experimental.
    with self.command_group('storageimportexport', is_experimental=True):
        pass


def storageimportexport_location_list(client):
    """List the locations to which Import/Export drives can be shipped."""
    return client.list()


def storageimportexport_location_show(client,
                                      location_name):
    """Show details for a single shipping location."""
    return client.get(location_name=location_name)


def storageimportexport_job_list(client,
                                 top=None,
                                 filter_=None,
                                 resource_group_name=None):
    """List jobs in a resource group, or subscription-wide when no group is given."""
    if resource_group_name:
        return client.list_by_resource_group(resource_group_name=resource_group_name,
                                             top=top,
                                             filter=filter_)
    return client.list_by_subscription(top=top, filter=filter_)


def storageimportexport_job_show(client,
                                 job_name,
                                 resource_group_name):
    """Get a single import/export job."""
    return client.get(job_name=job_name,
                      resource_group_name=resource_group_name)


def storageimportexport_job_create(client,
                                   job_name,
                                   resource_group_name,
                                   client_tenant_id=None,
                                   location=None,
                                   tags=None,
                                   storage_account_id=None,
                                   job_type=None,
                                   return_address=None,
                                   return_shipping=None,
                                   shipping_information=None,
                                   delivery_package=None,
                                   return_package=None,
                                   diagnostics_path=None,
                                   log_level=None,
                                   backup_drive_manifest=None,
                                   state=None,
                                   cancel_requested=None,
                                   percent_complete=None,
                                   incomplete_blob_list_uri=None,
                                   drive_list=None,
                                   export=None,
                                   provisioning_state=None,
                                   encryption_key=None):
    """Create an import or export job from the flat CLI arguments.

    The request body nests all job settings under 'properties', matching the
    resource-provider payload shape.
    """
    body = {
        'location': location,
        'tags': tags,
        'properties': {
            'storage_account_id': storage_account_id,
            'job_type': job_type,
            'return_address': return_address,
            'return_shipping': return_shipping,
            'shipping_information': shipping_information,
            'delivery_package': delivery_package,
            'return_package': return_package,
            'diagnostics_path': diagnostics_path,
            'log_level': log_level,
            'backup_drive_manifest': backup_drive_manifest,
            'state': state,
            'cancel_requested': cancel_requested,
            'percent_complete': percent_complete,
            'incomplete_blob_list_uri': incomplete_blob_list_uri,
            'drive_list': drive_list,
            'export': export,
            'provisioning_state': provisioning_state,
            'encryption_key': encryption_key,
        },
    }
    return client.create(job_name=job_name,
                         resource_group_name=resource_group_name,
                         client_tenant_id=client_tenant_id,
                         body=body)


def storageimportexport_job_update(client,
                                   job_name,
                                   resource_group_name,
                                   tags=None,
                                   cancel_requested=None,
                                   state=None,
                                   return_address=None,
                                   return_shipping=None,
                                   delivery_package=None,
                                   log_level=None,
                                   backup_drive_manifest=None,
                                   drive_list=None):
    """Update mutable fields of an existing job (the PATCH body is flat, unlike create)."""
    body = {
        'tags': tags,
        'cancel_requested': cancel_requested,
        'state': state,
        'return_address': return_address,
        'return_shipping': return_shipping,
        'delivery_package': delivery_package,
        'log_level': log_level,
        'backup_drive_manifest': backup_drive_manifest,
        'drive_list': drive_list,
    }
    return client.update(job_name=job_name,
                         resource_group_name=resource_group_name,
                         body=body)


def storageimportexport_job_delete(client,
                                   job_name,
                                   resource_group_name):
    """Delete an import/export job."""
    return client.delete(job_name=job_name,
                         resource_group_name=resource_group_name)


def storageimportexport_bit_locker_key_list(client,
                                            job_name,
                                            resource_group_name):
    """List the BitLocker keys for the drives attached to a job."""
    return client.list(job_name=job_name,
                       resource_group_name=resource_group_name)
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
"""Test-support package: manual-override dispatch, per-step result tracking,
coverage reporting and deferred exception re-raising for generated scenarios."""
import inspect
import logging
import os
import sys
import traceback
import datetime as dt

from azure.core.exceptions import AzureError
from azure.cli.testsdk.exceptions import CliTestError, CliExecutionError, JMESPathCheckAssertionError


logger = logging.getLogger('azure.cli.testsdk')
logger.addHandler(logging.StreamHandler())
__path__ = __import__('pkgutil').extend_path(__path__, __name__)
# Deferred (func_name, sys.exc_info()) tuples collected when TEST_EXCEPTION_CACHE is enabled.
exceptions = []
# Per-function result records keyed by function name; consumed by calc_coverage.
test_map = dict()
# NOTE(review): "successed" is a typo, but it is written into coverage files, so it must stay.
SUCCESSED = "successed"
FAILED = "failed"


def try_manual(func):
    """Decorator that prefers a hand-written override of *func* (looked up in the
    sibling ``manual`` package) over the generated implementation, and records
    each step's outcome in ``test_map``.

    When the environment variable TEST_EXCEPTION_CACHE is "true", known test
    exceptions are cached in ``exceptions`` instead of propagating, so a run
    can continue past a failing step; ``raise_if`` re-raises them afterwards.
    """
    def import_manual_function(origin_func):
        # Map the decorated function's file path to the equivalent module under
        # ..manual and return the function of the same name from there.
        from importlib import import_module
        decorated_path = inspect.getfile(origin_func).lower()
        module_path = __path__[0].lower()
        if not decorated_path.startswith(module_path):
            raise Exception("Decorator can only be used in submodules!")
        manual_path = os.path.join(
            decorated_path[module_path.rfind(os.path.sep) + 1:])
        manual_file_path, manual_file_name = os.path.split(manual_path)
        module_name, _ = os.path.splitext(manual_file_name)
        # Build a relative module path like "..manual.latest.example_steps".
        manual_module = "..manual." + \
            ".".join(manual_file_path.split(os.path.sep) + [module_name, ])
        return getattr(import_module(manual_module, package=__name__), origin_func.__name__)

    def get_func_to_call():
        # Fall back to the generated function when no manual override exists.
        func_to_call = func
        try:
            func_to_call = import_manual_function(func)
            logger.info("Found manual override for %s(...)", func.__name__)
        except (ImportError, AttributeError):
            pass
        return func_to_call

    def wrapper(*args, **kwargs):
        func_to_call = get_func_to_call()
        logger.info("running %s()...", func.__name__)
        try:
            # Optimistically record success; overwritten below on failure.
            test_map[func.__name__] = dict()
            test_map[func.__name__]["result"] = SUCCESSED
            test_map[func.__name__]["error_message"] = ""
            test_map[func.__name__]["error_stack"] = ""
            test_map[func.__name__]["error_normalized"] = ""
            test_map[func.__name__]["start_dt"] = dt.datetime.utcnow()
            ret = func_to_call(*args, **kwargs)
        except (AssertionError, AzureError, CliTestError, CliExecutionError, SystemExit,
                JMESPathCheckAssertionError) as e:
            # Without the exception cache, fail fast as a normal test would.
            use_exception_cache = os.getenv("TEST_EXCEPTION_CACHE")
            if use_exception_cache is None or use_exception_cache.lower() != "true":
                raise
            test_map[func.__name__]["end_dt"] = dt.datetime.utcnow()
            test_map[func.__name__]["result"] = FAILED
            # Flatten newlines and cap length so the markdown table stays one row.
            test_map[func.__name__]["error_message"] = str(e).replace("\r\n", " ").replace("\n", " ")[:500]
            test_map[func.__name__]["error_stack"] = traceback.format_exc().replace(
                "\r\n", " ").replace("\n", " ")[:500]
            logger.info("--------------------------------------")
            logger.info("step exception: %s", e)
            logger.error("--------------------------------------")
            logger.error("step exception in %s: %s", func.__name__, e)
            logger.info(traceback.format_exc())
            exceptions.append((func.__name__, sys.exc_info()))
        else:
            test_map[func.__name__]["end_dt"] = dt.datetime.utcnow()
            return ret

    # Applied to a class: resolve the override once and return it directly.
    if inspect.isclass(func):
        return get_func_to_call()
    return wrapper


def calc_coverage(filename):
    """Write a <testfile>_coverage.md table summarizing every recorded step_* result."""
    filename = filename.split(".")[0]
    coverage_name = filename + "_coverage.md"
    with open(coverage_name, "w") as f:
        f.write("|Scenario|Result|ErrorMessage|ErrorStack|ErrorNormalized|StartDt|EndDt|\n")
        total = len(test_map)
        covered = 0
        for k, v in test_map.items():
            # Only step_* entries count toward coverage; setup/cleanup are excluded.
            if not k.startswith("step_"):
                total -= 1
                continue
            if v["result"] == SUCCESSED:
                covered += 1
            f.write("|{step_name}|{result}|{error_message}|{error_stack}|{error_normalized}|{start_dt}|"
                    "{end_dt}|\n".format(step_name=k, **v))
        f.write("Coverage: {}/{}\n".format(covered, total))
    print("Create coverage\n", file=sys.stderr)


def raise_if():
    """Re-raise the first cached exception, appending a summary of any later ones."""
    if exceptions:
        if len(exceptions) <= 1:
            raise exceptions[0][1][1]
        message = "{}\nFollowed with exceptions in other steps:\n".format(str(exceptions[0][1][1]))
        message += "\n".join(["{}: {}".format(h[0], h[1][1]) for h in exceptions[1:]])
        # Preserve the original traceback of the first failure.
        raise exceptions[0][1][0](message).with_traceback(exceptions[0][1][2])
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------


from .. import try_manual


# Each step runs one recorded `az` command; the literal command text must stay
# exactly as generated so it matches the test recordings.

# EXAMPLE: /Jobs/put/Create export job
@try_manual
def step_job_create(test, rg, checks=None):
    effective_checks = [] if checks is None else checks
    test.cmd('az storageimportexport job create '
             '--location "{myLocation}" '
             '--backup-drive-manifest true '
             '--diagnostics-path "waimportexport" '
             '--export blob-path-prefix="/" '
             '--job-type "Export" '
             '--log-level "Verbose" '
             '--return-address city="Redmond" country-or-region="USA" email="Test@contoso.com" phone="4250000000" '
             'postal-code="98007" recipient-name="Test" state-or-province="wa" street-address1="Street1" '
             'street-address2="street2" '
             '--return-shipping carrier-account-number="989ffff" carrier-name="FedEx" '
             '--storage-account-id "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.ClassicSto'
             'rage/storageAccounts/{sa}" '
             '--name "{myJob2}" '
             '--resource-group "{rg}"',
             checks=effective_checks)


# EXAMPLE: /Jobs/put/Create import job
@try_manual
def step_job_create2(test, rg, checks=None):
    effective_checks = [] if checks is None else checks
    test.cmd('az storageimportexport job create '
             '--location "{myLocation}" '
             '--backup-drive-manifest true '
             '--diagnostics-path "waimportexport" '
             '--drive-list bit-locker-key="238810-662376-448998-450120-652806-203390-606320-483076" '
             'drive-header-hash="0:1048576:FB6B6ED500D49DA6E0D723C98D42C657F2881CC13357C28DCECA6A524F1292501571A3212385'
             '40E621AB5BD9C9A32637615919A75593E6CB5C1515DAE341CABF;135266304:143360:C957A189AFC38C4E80731252301EB91427C'
             'E55E61448FA3C73C6FDDE70ABBC197947EC8D0249A2C639BB10B95957D5820A4BE8DFBBF76FFFA688AE5CE0D42EC3" '
             'drive-id="9CA995BB" manifest-file="\\\\8a0c23f7-14b7-470a-9633-fcd46590a1bc.manifest" '
             'manifest-hash="4228EC5D8E048CB9B515338C789314BE8D0B2FDBC7C7A0308E1C826242CDE74E" '
             '--job-type "Import" '
             '--log-level "Verbose" '
             '--return-address city="Redmond" country-or-region="USA" email="Test@contoso.com" phone="4250000000" '
             'postal-code="98007" recipient-name="Test" state-or-province="wa" street-address1="Street1" '
             'street-address2="street2" '
             '--return-shipping carrier-account-number="989ffff" carrier-name="FedEx" '
             '--storage-account-id "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.ClassicSto'
             'rage/storageAccounts/{sa}" '
             '--name "{myJob}" '
             '--resource-group "{rg}"',
             checks=effective_checks)


# EXAMPLE: /Jobs/get/Get export job
@try_manual
def step_job_show(test, rg, checks=None):
    effective_checks = [] if checks is None else checks
    test.cmd('az storageimportexport job show '
             '--name "{myJob}" '
             '--resource-group "{rg}"',
             checks=effective_checks)


# EXAMPLE: /Jobs/get/Get import job
@try_manual
def step_job_show2(test, rg, checks=None):
    # Both examples resolve to the same CLI call; delegate to the first one.
    return step_job_show(test, rg, checks)


# EXAMPLE: /Jobs/get/List jobs in a resource group
@try_manual
def step_job_list(test, rg, checks=None):
    effective_checks = [] if checks is None else checks
    test.cmd('az storageimportexport job list '
             '--resource-group "{rg}"',
             checks=effective_checks)


# EXAMPLE: /Jobs/get/List jobs in a subscription
@try_manual
def step_job_list2(test, rg, checks=None):
    effective_checks = [] if checks is None else checks
    # An empty resource group lists jobs across the whole subscription.
    test.cmd('az storageimportexport job list '
             '-g ""',
             checks=effective_checks)


# EXAMPLE: /Jobs/patch/Update export job
@try_manual
def step_job_update(test, rg, checks=None):
    effective_checks = [] if checks is None else checks
    test.cmd('az storageimportexport job update '
             '--backup-drive-manifest true '
             '--log-level "Verbose" '
             '--state "" '
             '--name "{myJob2}" '
             '--resource-group "{rg}"',
             checks=effective_checks)


# EXAMPLE: /Jobs/patch/Update import job
@try_manual
def step_job_update2(test, rg, checks=None):
    effective_checks = [] if checks is None else checks
    test.cmd('az storageimportexport job update '
             '--backup-drive-manifest true '
             '--log-level "Verbose" '
             '--state "" '
             '--name "{myJob}" '
             '--resource-group "{rg}"',
             checks=effective_checks)


# EXAMPLE: /BitLockerKeys/post/List BitLocker Keys for drives in a job
@try_manual
def step_bit_locker_key_list(test, rg, checks=None):
    effective_checks = [] if checks is None else checks
    test.cmd('az storageimportexport bit-locker-key list '
             '--job-name "{myJob}" '
             '--resource-group "{rg}"',
             checks=effective_checks)


# EXAMPLE: /Jobs/delete/Delete job
@try_manual
def step_job_delete(test, rg, checks=None):
    effective_checks = [] if checks is None else checks
    # -y suppresses the interactive confirmation prompt.
    test.cmd('az storageimportexport job delete -y '
             '--name "{myJob}" '
             '--resource-group "{rg}"',
             checks=effective_checks)


# EXAMPLE: /Locations/get/Get locations
@try_manual
def step_location_show(test, rg, checks=None):
    effective_checks = [] if checks is None else checks
    test.cmd('az storageimportexport location show '
             '--name "{myLocation}"',
             checks=effective_checks)


# EXAMPLE: /Locations/get/List locations
@try_manual
def step_location_list(test, rg, checks=None):
    effective_checks = [] if checks is None else checks
    test.cmd('az storageimportexport location list',
             checks=effective_checks)
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
"""Generated end-to-end scenario test for the storageimportexport extension."""

import os
from azure.cli.testsdk import ScenarioTest
from azure.cli.testsdk import ResourceGroupPreparer
from azure.cli.testsdk import StorageAccountPreparer
from .example_steps import step_location_show
from .example_steps import step_bit_locker_key_list
from .example_steps import step_job_list
from .example_steps import step_job_list2
from .example_steps import step_location_list
from .example_steps import step_job_delete
from .. import (
    try_manual,
    raise_if,
    calc_coverage
)


# Directory containing this test file; available for recording lookups.
TEST_DIR = os.path.abspath(os.path.join(os.path.abspath(__file__), '..'))


# Env setup_scenario
@try_manual
def setup_scenario(test, rg):
    # No extra environment setup needed; override in ../manual to customize.
    pass


# Env cleanup_scenario
@try_manual
def cleanup_scenario(test, rg):
    # No extra cleanup needed; preparers tear down the resource group/account.
    pass


# Testcase: Scenario
@try_manual
def call_scenario(test, rg):
    """Run the generated steps in order; the step order must match the recording."""
    setup_scenario(test, rg)
    # STEP NOT FOUND: /Jobs/put/Create job
    # STEP NOT FOUND: /Jobs/get/Get job
    step_location_show(test, rg, checks=[])
    step_bit_locker_key_list(test, rg, checks=[])
    step_job_list(test, rg, checks=[])
    step_job_list2(test, rg, checks=[
        test.check('length(@)', 1),
    ])
    step_location_list(test, rg, checks=[])
    # STEP NOT FOUND: /Jobs/patch/Update job
    step_job_delete(test, rg, checks=[])
    cleanup_scenario(test, rg)


# Test class for Scenario
@try_manual
class StorageimportexportScenarioTest(ScenarioTest):
    """ScenarioTest wrapper that seeds self.kwargs used by the step commands."""

    def __init__(self, *args, **kwargs):
        super(StorageimportexportScenarioTest, self).__init__(*args, **kwargs)
        self.kwargs.update({
            'subscription_id': self.get_subscription_id()
        })

        # Template values substituted into the {placeholders} of each az command.
        self.kwargs.update({
            'myLocation': 'West US',
            'myJob': 'myJob',
            'myJob2': 'myExportJob',
        })

    # NOTE(review): the [:7] slice truncates both prefixes to 'clitest',
    # so the long literal suffixes are effectively unused — kept as generated
    # because the resulting names are baked into the test recordings.
    @ResourceGroupPreparer(name_prefix='cliteststorageimportexport_myResourceGroup'[:7], key='rg',
                           parameter_name='rg')
    @StorageAccountPreparer(name_prefix='cliteststorageimportexport_test'[:7], key='sa',
                            resource_group_parameter_name='rg')
    def test_storageimportexport_Scenario(self, rg):
        call_scenario(self, rg)
        calc_coverage(__file__)
        raise_if()
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

# pkgutil-style namespace package: lets other distributions contribute
# submodules under the same package name.
__path__ = __import__('pkgutil').extend_path(__path__, __name__)
# --------------------------------------------------------------------------

from ._storage_import_export import StorageImportExport

__all__ = ['StorageImportExport']

# Give a hand-authored ``_patch`` module (if present) a chance to customize
# the generated client; its absence is expected and silently ignored.
try:
    from ._patch import patch_sdk  # type: ignore
    patch_sdk()
except ImportError:
    pass


# ===========================================================================
# _configuration.py
# ===========================================================================

from typing import TYPE_CHECKING

from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMHttpLoggingPolicy

if TYPE_CHECKING:
    # pylint: disable=unused-import,ungrouped-imports
    from typing import Any, Optional

    from azure.core.credentials import TokenCredential

VERSION = "unknown"


class StorageImportExportConfiguration(Configuration):
    """Configuration for StorageImportExport.

    Note that all parameters used to create this instance are saved as instance
    attributes.

    :param credential: Credential needed for the client to connect to Azure.
    :type credential: ~azure.core.credentials.TokenCredential
    :param subscription_id: The subscription ID for the Azure user.
    :type subscription_id: str
    :param accept_language: Specifies the preferred language for the response.
    :type accept_language: str
    """

    def __init__(
        self,
        credential,  # type: "TokenCredential"
        subscription_id,  # type: str
        accept_language=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        for name, value in (('credential', credential), ('subscription_id', subscription_id)):
            if value is None:
                raise ValueError("Parameter '%s' must not be None." % name)
        super(StorageImportExportConfiguration, self).__init__(**kwargs)

        self.credential = credential
        self.subscription_id = subscription_id
        self.accept_language = accept_language
        # Fixed service API version for this generated client.
        self.api_version = "2016-11-01"
        self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
        kwargs.setdefault('sdk_moniker', 'storageimportexport/{}'.format(VERSION))
        self._configure(**kwargs)

    def _configure(
        self,
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        # Every pipeline policy may be overridden through kwargs; otherwise the
        # azure-core / azure-mgmt-core default is constructed.
        get = kwargs.get
        self.user_agent_policy = get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
        self.headers_policy = get('headers_policy') or policies.HeadersPolicy(**kwargs)
        self.proxy_policy = get('proxy_policy') or policies.ProxyPolicy(**kwargs)
        self.logging_policy = get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
        self.http_logging_policy = get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
        self.retry_policy = get('retry_policy') or policies.RetryPolicy(**kwargs)
        self.custom_hook_policy = get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
        self.redirect_policy = get('redirect_policy') or policies.RedirectPolicy(**kwargs)
        self.authentication_policy = get('authentication_policy')
        if self.credential and not self.authentication_policy:
            self.authentication_policy = policies.BearerTokenCredentialPolicy(
                self.credential, *self.credential_scopes, **kwargs)
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------

from typing import TYPE_CHECKING

from azure.mgmt.core import ARMPipelineClient
from msrest import Deserializer, Serializer

if TYPE_CHECKING:
    # pylint: disable=unused-import,ungrouped-imports
    from typing import Any, Optional

    from azure.core.credentials import TokenCredential

from . import models
from ._configuration import StorageImportExportConfiguration
from .operations import BitLockerKeysOperations, JobsOperations, LocationsOperations, Operations


class StorageImportExport(object):
    """The Storage Import/Export Resource Provider API.

    :ivar locations: LocationsOperations operations
    :vartype locations: storage_import_export.operations.LocationsOperations
    :ivar jobs: JobsOperations operations
    :vartype jobs: storage_import_export.operations.JobsOperations
    :ivar bit_locker_keys: BitLockerKeysOperations operations
    :vartype bit_locker_keys: storage_import_export.operations.BitLockerKeysOperations
    :ivar operations: Operations operations
    :vartype operations: storage_import_export.operations.Operations
    :param credential: Credential needed for the client to connect to Azure.
    :type credential: ~azure.core.credentials.TokenCredential
    :param subscription_id: The subscription ID for the Azure user.
    :type subscription_id: str
    :param accept_language: Specifies the preferred language for the response.
    :type accept_language: str
    :param str base_url: Service URL
    """

    def __init__(
        self,
        credential,  # type: "TokenCredential"
        subscription_id,  # type: str
        accept_language=None,  # type: Optional[str]
        base_url=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        self._config = StorageImportExportConfiguration(credential, subscription_id, accept_language, **kwargs)
        self._client = ARMPipelineClient(
            base_url=base_url or 'https://management.azure.com',
            config=self._config,
            **kwargs)

        # Serializer and deserializer share the full generated model catalogue.
        client_models = {name: value for name, value in models.__dict__.items() if isinstance(value, type)}
        self._serialize = Serializer(client_models)
        self._deserialize = Deserializer(client_models)

        # All operation groups receive the same pipeline/serialization quartet.
        op_args = (self._client, self._config, self._serialize, self._deserialize)
        self.locations = LocationsOperations(*op_args)
        self.jobs = JobsOperations(*op_args)
        self.bit_locker_keys = BitLockerKeysOperations(*op_args)
        self.operations = Operations(*op_args)

    def close(self):
        # type: () -> None
        """Close the underlying HTTP pipeline."""
        self._client.close()

    def __enter__(self):
        # type: () -> StorageImportExport
        self._client.__enter__()
        return self

    def __exit__(self, *exc_details):
        # type: (Any) -> None
        self._client.__exit__(*exc_details)
# --------------------------------------------------------------------------

from ._storage_import_export import StorageImportExport

__all__ = ['StorageImportExport']


# ===========================================================================
# aio/_configuration.py
# ===========================================================================
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------

from typing import Any, Optional, TYPE_CHECKING

from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMHttpLoggingPolicy

if TYPE_CHECKING:
    # pylint: disable=unused-import,ungrouped-imports
    from azure.core.credentials_async import AsyncTokenCredential

VERSION = "unknown"


class StorageImportExportConfiguration(Configuration):
    """Configuration for StorageImportExport.

    Note that all parameters used to create this instance are saved as instance
    attributes.

    :param credential: Credential needed for the client to connect to Azure.
    :type credential: ~azure.core.credentials_async.AsyncTokenCredential
    :param subscription_id: The subscription ID for the Azure user.
    :type subscription_id: str
    :param accept_language: Specifies the preferred language for the response.
    :type accept_language: str
    """

    def __init__(
        self,
        credential: "AsyncTokenCredential",
        subscription_id: str,
        accept_language: Optional[str] = None,
        **kwargs: Any
    ) -> None:
        for name, value in (('credential', credential), ('subscription_id', subscription_id)):
            if value is None:
                raise ValueError("Parameter '%s' must not be None." % name)
        super(StorageImportExportConfiguration, self).__init__(**kwargs)

        self.credential = credential
        self.subscription_id = subscription_id
        self.accept_language = accept_language
        # Fixed service API version for this generated client.
        self.api_version = "2016-11-01"
        self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
        kwargs.setdefault('sdk_moniker', 'storageimportexport/{}'.format(VERSION))
        self._configure(**kwargs)

    def _configure(
        self,
        **kwargs: Any
    ) -> None:
        # Every pipeline policy may be overridden through kwargs; otherwise the
        # async azure-core / azure-mgmt-core default is constructed.
        get = kwargs.get
        self.user_agent_policy = get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
        self.headers_policy = get('headers_policy') or policies.HeadersPolicy(**kwargs)
        self.proxy_policy = get('proxy_policy') or policies.ProxyPolicy(**kwargs)
        self.logging_policy = get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
        self.http_logging_policy = get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
        self.retry_policy = get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
        self.custom_hook_policy = get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
        self.redirect_policy = get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
        self.authentication_policy = get('authentication_policy')
        if self.credential and not self.authentication_policy:
            self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(
                self.credential, *self.credential_scopes, **kwargs)
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------

from typing import Any, Optional, TYPE_CHECKING

from azure.mgmt.core import AsyncARMPipelineClient
from msrest import Deserializer, Serializer

if TYPE_CHECKING:
    # pylint: disable=unused-import,ungrouped-imports
    from azure.core.credentials_async import AsyncTokenCredential

from .. import models
from ._configuration import StorageImportExportConfiguration
from .operations import BitLockerKeysOperations, JobsOperations, LocationsOperations, Operations


class StorageImportExport(object):
    """The Storage Import/Export Resource Provider API.

    :ivar locations: LocationsOperations operations
    :vartype locations: storage_import_export.aio.operations.LocationsOperations
    :ivar jobs: JobsOperations operations
    :vartype jobs: storage_import_export.aio.operations.JobsOperations
    :ivar bit_locker_keys: BitLockerKeysOperations operations
    :vartype bit_locker_keys: storage_import_export.aio.operations.BitLockerKeysOperations
    :ivar operations: Operations operations
    :vartype operations: storage_import_export.aio.operations.Operations
    :param credential: Credential needed for the client to connect to Azure.
    :type credential: ~azure.core.credentials_async.AsyncTokenCredential
    :param subscription_id: The subscription ID for the Azure user.
    :type subscription_id: str
    :param accept_language: Specifies the preferred language for the response.
    :type accept_language: str
    :param str base_url: Service URL
    """

    def __init__(
        self,
        credential: "AsyncTokenCredential",
        subscription_id: str,
        accept_language: Optional[str] = None,
        base_url: Optional[str] = None,
        **kwargs: Any
    ) -> None:
        self._config = StorageImportExportConfiguration(credential, subscription_id, accept_language, **kwargs)
        self._client = AsyncARMPipelineClient(
            base_url=base_url or 'https://management.azure.com',
            config=self._config,
            **kwargs)

        # Serializer and deserializer share the full generated model catalogue.
        client_models = {name: value for name, value in models.__dict__.items() if isinstance(value, type)}
        self._serialize = Serializer(client_models)
        self._deserialize = Deserializer(client_models)

        # All operation groups receive the same pipeline/serialization quartet.
        op_args = (self._client, self._config, self._serialize, self._deserialize)
        self.locations = LocationsOperations(*op_args)
        self.jobs = JobsOperations(*op_args)
        self.bit_locker_keys = BitLockerKeysOperations(*op_args)
        self.operations = Operations(*op_args)

    async def close(self) -> None:
        """Close the underlying async HTTP pipeline."""
        await self._client.close()

    async def __aenter__(self) -> "StorageImportExport":
        await self._client.__aenter__()
        return self

    async def __aexit__(self, *exc_details) -> None:
        await self._client.__aexit__(*exc_details)
# --------------------------------------------------------------------------

from ._locations_operations import LocationsOperations
from ._jobs_operations import JobsOperations
from ._bit_locker_keys_operations import BitLockerKeysOperations
from ._operations import Operations

# Public surface of the async operations package.
__all__ = [
    'LocationsOperations',
    'JobsOperations',
    'BitLockerKeysOperations',
    'Operations',
]


# ===========================================================================
# aio/operations/_bit_locker_keys_operations.py
# ===========================================================================
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import warnings
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar

from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import (
    ClientAuthenticationError,
    HttpResponseError,
    ResourceExistsError,
    ResourceNotFoundError,
    map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat

from ... import models

T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]


class BitLockerKeysOperations:
    """BitLockerKeysOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~storage_import_export.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = models

    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._config = config
        self._serialize = serializer
        self._deserialize = deserializer

    def list(
        self,
        job_name: str,
        resource_group_name: str,
        **kwargs
    ) -> AsyncIterable["models.GetBitLockerKeysResponse"]:
        """Returns the BitLocker Keys for all drives in the specified job.

        :param job_name: The name of the import/export job.
        :type job_name: str
        :param resource_group_name: The resource group name uniquely identifies the resource group
         within the user subscription.
        :type resource_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either GetBitLockerKeysResponse or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~storage_import_export.models.GetBitLockerKeysResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.GetBitLockerKeysResponse"]
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2016-11-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            headers = {}  # type: Dict[str, Any]
            if self._config.accept_language is not None:
                headers['Accept-Language'] = self._serialize.header(
                    "self._config.accept_language", self._config.accept_language, 'str')
            headers['Accept'] = self._serialize.header("accept", accept, 'str')

            if next_link:
                # Continuation links supplied by the service are followed verbatim.
                return self._client.get(next_link, {}, headers)

            path_format_arguments = {
                'jobName': self._serialize.url("job_name", job_name, 'str'),
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            }
            url = self._client.format_url(self.list.metadata['url'], **path_format_arguments)  # type: ignore
            query = {}  # type: Dict[str, Any]
            query['api-version'] = self._serialize.query("api_version", api_version, 'str')
            # BitLocker keys are listed through a POST action, not a GET.
            return self._client.post(url, query, headers)

        async def extract_data(pipeline_response):
            deserialized = self._deserialize('GetBitLockerKeysResponse', pipeline_response)
            elements = deserialized.value
            if cls:
                elements = cls(elements)
            # This response carries no continuation token, so one page only.
            return None, AsyncList(elements)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize(models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(get_next, extract_data)
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ImportExport/jobs/{jobName}/listBitLockerKeys'}  # type: ignore
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class JobsOperations: + """JobsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~storage_import_export.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_subscription( + self, + top: Optional[int] = None, + filter: Optional[str] = None, + **kwargs + ) -> AsyncIterable["models.ListJobsResponse"]: + """Returns all active and completed jobs in a subscription. + + :param top: An integer value that specifies how many jobs at most should be returned. The value + cannot exceed 100. + :type top: int + :param filter: Can be used to restrict the results to certain conditions. 
+ :type filter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ListJobsResponse or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~storage_import_export.models.ListJobsResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ListJobsResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2016-11-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if self._config.accept_language is not None: + header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_subscription.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int') + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = 
self._deserialize('ListJobsResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.ImportExport/jobs'} # type: ignore + + def list_by_resource_group( + self, + resource_group_name: str, + top: Optional[int] = None, + filter: Optional[str] = None, + **kwargs + ) -> AsyncIterable["models.ListJobsResponse"]: + """Returns all active and completed jobs in a resource group. + + :param resource_group_name: The resource group name uniquely identifies the resource group + within the user subscription. + :type resource_group_name: str + :param top: An integer value that specifies how many jobs at most should be returned. The value + cannot exceed 100. + :type top: int + :param filter: Can be used to restrict the results to certain conditions. 
+ :type filter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ListJobsResponse or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~storage_import_export.models.ListJobsResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ListJobsResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2016-11-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if self._config.accept_language is not None: + header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_resource_group.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int') + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, 
header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('ListJobsResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ImportExport/jobs'} # type: ignore + + async def get( + self, + job_name: str, + resource_group_name: str, + **kwargs + ) -> "models.JobResponse": + """Gets information about an existing job. + + :param job_name: The name of the import/export job. + :type job_name: str + :param resource_group_name: The resource group name uniquely identifies the resource group + within the user subscription. 
+ :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: JobResponse, or the result of cls(response) + :rtype: ~storage_import_export.models.JobResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.JobResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2016-11-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if self._config.accept_language is not None: + header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized 
= self._deserialize('JobResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ImportExport/jobs/{jobName}'} # type: ignore + + async def update( + self, + job_name: str, + resource_group_name: str, + body: "models.UpdateJobParameters", + **kwargs + ) -> "models.JobResponse": + """Updates specific properties of a job. You can call this operation to notify the Import/Export + service that the hard drives comprising the import or export job have been shipped to the + Microsoft data center. It can also be used to cancel an existing job. + + :param job_name: The name of the import/export job. + :type job_name: str + :param resource_group_name: The resource group name uniquely identifies the resource group + within the user subscription. + :type resource_group_name: str + :param body: The parameters to update in the job. 
+ :type body: ~storage_import_export.models.UpdateJobParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :return: JobResponse, or the result of cls(response) + :rtype: ~storage_import_export.models.JobResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.JobResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2016-11-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if self._config.accept_language is not None: + header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(body, 'UpdateJobParameters') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await 
self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('JobResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ImportExport/jobs/{jobName}'} # type: ignore + + async def create( + self, + job_name: str, + resource_group_name: str, + body: "models.PutJobParameters", + client_tenant_id: Optional[str] = None, + **kwargs + ) -> "models.JobResponse": + """Creates a new job or updates an existing job in the specified subscription. + + :param job_name: The name of the import/export job. + :type job_name: str + :param resource_group_name: The resource group name uniquely identifies the resource group + within the user subscription. + :type resource_group_name: str + :param body: The parameters used for creating the job. + :type body: ~storage_import_export.models.PutJobParameters + :param client_tenant_id: The tenant ID of the client making the request. 
+ :type client_tenant_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: JobResponse, or the result of cls(response) + :rtype: ~storage_import_export.models.JobResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.JobResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2016-11-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create.metadata['url'] # type: ignore + path_format_arguments = { + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if self._config.accept_language is not None: + header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str') + if client_tenant_id is not None: + header_parameters['x-ms-client-tenant-id'] = self._serialize.header("client_tenant_id", client_tenant_id, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(body, 'PutJobParameters') + body_content_kwargs['content'] = body_content + request = 
self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('JobResponse', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('JobResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ImportExport/jobs/{jobName}'} # type: ignore + + async def delete( + self, + job_name: str, + resource_group_name: str, + **kwargs + ) -> None: + """Deletes an existing job. Only jobs in the Creating or Completed states can be deleted. + + :param job_name: The name of the import/export job. + :type job_name: str + :param resource_group_name: The resource group name uniquely identifies the resource group + within the user subscription. 
+ :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2016-11-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if self._config.accept_language is not None: + header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + 
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ImportExport/jobs/{jobName}'} # type: ignore

# ---- vendored_sdks/storageimportexport/aio/operations/_locations_operations.py ----
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings

from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat

from ... import models

T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]

class LocationsOperations:
    """Async operations for the Import/Export ``locations`` resource.

    You should not instantiate this class directly; the service client creates
    it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~storage_import_export.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = models

    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def list(
        self,
        **kwargs
    ) -> AsyncIterable["models.LocationsResponse"]:
        """Returns a list of locations to which you can ship the disks associated with an import or export
        job. A location is a Microsoft data center region.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either LocationsResponse or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~storage_import_export.models.LocationsResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.LocationsResponse"]
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2016-11-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            headers = {}  # type: Dict[str, Any]
            if self._config.accept_language is not None:
                headers['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str')
            headers['Accept'] = self._serialize.header("accept", accept, 'str')

            if next_link:
                # extract_data always returns None as the continuation token, so
                # this branch exists only for paging-protocol symmetry.
                params = {}  # type: Dict[str, Any]
                return self._client.get(next_link, params, headers)

            request_url = self.list.metadata['url']  # type: ignore
            params = {}  # type: Dict[str, Any]
            params['api-version'] = self._serialize.query("api_version", api_version, 'str')
            return self._client.get(request_url, params, headers)

        async def extract_data(pipeline_response):
            page = self._deserialize('LocationsResponse', pipeline_response)
            elems = page.value
            if cls:
                elems = cls(elems)
            # The whole listing arrives in a single response; no next link.
            return None, AsyncList(elems)

        async def get_next(next_link=None):
            pipeline_response = await self._client._pipeline.run(prepare_request(next_link), stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize(models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/providers/Microsoft.ImportExport/locations'} # type: ignore

    async def get(
        self,
        location_name: str,
        **kwargs
    ) -> "models.Location":
        """Returns the details about a location to which you can ship the disks associated with an import
        or export job. A location is an Azure region.

        :param location_name: The name of the location. For example, West US or westus.
        :type location_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: Location, or the result of cls(response)
        :rtype: ~storage_import_export.models.Location
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.Location"]
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2016-11-01"
        accept = "application/json"

        # Build the request URL from the operation metadata template.
        request_url = self._client.format_url(
            self.get.metadata['url'],  # type: ignore
            locationName=self._serialize.url("location_name", location_name, 'str'),
        )

        params = {}  # type: Dict[str, Any]
        params['api-version'] = self._serialize.query("api_version", api_version, 'str')

        headers = {}  # type: Dict[str, Any]
        if self._config.accept_language is not None:
            headers['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str')
        headers['Accept'] = self._serialize.header("accept", accept, 'str')

        pipeline_response = await self._client._pipeline.run(
            self._client.get(request_url, params, headers), stream=False, **kwargs
        )
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('Location', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/providers/Microsoft.ImportExport/locations/{locationName}'} # type: ignore
'/providers/Microsoft.ImportExport/locations/{locationName}'} # type: ignore diff --git a/src/storageimportexport/azext_storageimportexport/vendored_sdks/storageimportexport/aio/operations/_operations.py b/src/storageimportexport/azext_storageimportexport/vendored_sdks/storageimportexport/aio/operations/_operations.py new file mode 100644 index 00000000000..c0ece17bc12 --- /dev/null +++ b/src/storageimportexport/azext_storageimportexport/vendored_sdks/storageimportexport/aio/operations/_operations.py @@ -0,0 +1,107 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class Operations: + """Operations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. 
+ :type models: ~storage_import_export.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs + ) -> AsyncIterable["models.ListOperationsResponse"]: + """Returns the list of operations supported by the import/export resource provider. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ListOperationsResponse or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~storage_import_export.models.ListOperationsResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ListOperationsResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2016-11-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if self._config.accept_language is not None: + header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = 
next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('ListOperationsResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/providers/Microsoft.ImportExport/operations'} # type: ignore diff --git a/src/storageimportexport/azext_storageimportexport/vendored_sdks/storageimportexport/models/__init__.py b/src/storageimportexport/azext_storageimportexport/vendored_sdks/storageimportexport/models/__init__.py new file mode 100644 index 00000000000..03d27140163 --- /dev/null +++ b/src/storageimportexport/azext_storageimportexport/vendored_sdks/storageimportexport/models/__init__.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# --------------------------------------------------------------------------

# Prefer the Python-3-only annotated models; fall back to the un-annotated
# variants on interpreters that cannot parse them.
try:
    from ._models_py3 import DeliveryPackageInformation
    from ._models_py3 import DriveBitLockerKey
    from ._models_py3 import DriveStatus
    from ._models_py3 import EncryptionKeyDetails
    from ._models_py3 import ErrorResponse
    from ._models_py3 import ErrorResponseErrorDetailsItem
    from ._models_py3 import Export
    from ._models_py3 import GetBitLockerKeysResponse
    from ._models_py3 import IdentityDetails
    from ._models_py3 import JobDetails
    from ._models_py3 import JobResponse
    from ._models_py3 import ListJobsResponse
    from ._models_py3 import ListOperationsResponse
    from ._models_py3 import Location
    from ._models_py3 import LocationsResponse
    from ._models_py3 import Operation
    from ._models_py3 import PackageInfomation
    from ._models_py3 import PutJobParameters
    from ._models_py3 import ReturnAddress
    from ._models_py3 import ReturnShipping
    from ._models_py3 import ShippingInformation
    from ._models_py3 import SystemData
    from ._models_py3 import UpdateJobParameters
except (SyntaxError, ImportError):
    from ._models import DeliveryPackageInformation  # type: ignore
    from ._models import DriveBitLockerKey  # type: ignore
    from ._models import DriveStatus  # type: ignore
    from ._models import EncryptionKeyDetails  # type: ignore
    from ._models import ErrorResponse  # type: ignore
    from ._models import ErrorResponseErrorDetailsItem  # type: ignore
    from ._models import Export  # type: ignore
    from ._models import GetBitLockerKeysResponse  # type: ignore
    from ._models import IdentityDetails  # type: ignore
    from ._models import JobDetails  # type: ignore
    from ._models import JobResponse  # type: ignore
    from ._models import ListJobsResponse  # type: ignore
    from ._models import ListOperationsResponse  # type: ignore
    from ._models import Location  # type: ignore
    from ._models import LocationsResponse  # type: ignore
    from ._models import Operation  # type: ignore
    from ._models import PackageInfomation  # type: ignore
    from ._models import PutJobParameters  # type: ignore
    from ._models import ReturnAddress  # type: ignore
    from ._models import ReturnShipping  # type: ignore
    from ._models import ShippingInformation  # type: ignore
    from ._models import SystemData  # type: ignore
    from ._models import UpdateJobParameters  # type: ignore

from ._storage_import_export_enums import (
    CreatedByType,
    DriveState,
    EncryptionKekType,
    IdentityType,
)

# Public API of the models package: every model class, then the enums.
# NOTE(review): 'PackageInfomation' is a typo in the generated API surface;
# renaming it here would break callers, so it is kept as generated.
__all__ = [
    'DeliveryPackageInformation',
    'DriveBitLockerKey',
    'DriveStatus',
    'EncryptionKeyDetails',
    'ErrorResponse',
    'ErrorResponseErrorDetailsItem',
    'Export',
    'GetBitLockerKeysResponse',
    'IdentityDetails',
    'JobDetails',
    'JobResponse',
    'ListJobsResponse',
    'ListOperationsResponse',
    'Location',
    'LocationsResponse',
    'Operation',
    'PackageInfomation',
    'PutJobParameters',
    'ReturnAddress',
    'ReturnShipping',
    'ShippingInformation',
    'SystemData',
    'UpdateJobParameters',
    'CreatedByType',
    'DriveState',
    'EncryptionKekType',
    'IdentityType',
]
# --------------------------------------------------------------------------
# Generated serialization models for the Storage Import/Export service.
# --------------------------------------------------------------------------

from azure.core.exceptions import HttpResponseError
import msrest.serialization


class DeliveryPackageInformation(msrest.serialization.Model):
    """Describes the package a customer ships to the Microsoft data center.

    All required parameters must be populated in order to send to Azure.

    :param carrier_name: Required. Name of the carrier shipping the import/export drives.
    :type carrier_name: str
    :param tracking_number: Required. Tracking number of the package.
    :type tracking_number: str
    :param drive_count: Number of drives included in the package.
    :type drive_count: long
    :param ship_date: Date the package was shipped.
    :type ship_date: str
    """

    _validation = {
        'carrier_name': {'required': True},
        'tracking_number': {'required': True},
    }

    _attribute_map = {
        'carrier_name': {'key': 'carrierName', 'type': 'str'},
        'tracking_number': {'key': 'trackingNumber', 'type': 'str'},
        'drive_count': {'key': 'driveCount', 'type': 'long'},
        'ship_date': {'key': 'shipDate', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(DeliveryPackageInformation, self).__init__(**kwargs)
        # Required keys raise KeyError when absent, matching msrest validation.
        self.carrier_name = kwargs['carrier_name']
        self.tracking_number = kwargs['tracking_number']
        self.drive_count = kwargs.get('drive_count')
        self.ship_date = kwargs.get('ship_date')


class DriveBitLockerKey(msrest.serialization.Model):
    """BitLocker recovery key or password for a specific drive.

    :param bit_locker_key: BitLocker recovery key or password.
    :type bit_locker_key: str
    :param drive_id: Drive ID.
    :type drive_id: str
    """

    _attribute_map = {
        'bit_locker_key': {'key': 'bitLockerKey', 'type': 'str'},
        'drive_id': {'key': 'driveId', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(DriveBitLockerKey, self).__init__(**kwargs)
        self.bit_locker_key = kwargs.get('bit_locker_key')
        self.drive_id = kwargs.get('drive_id')


class DriveStatus(msrest.serialization.Model):
    """Status information for a single drive in a job.

    :param drive_id: The drive's hardware serial number, without spaces.
    :type drive_id: str
    :param bit_locker_key: BitLocker key used to encrypt the drive.
    :type bit_locker_key: str
    :param manifest_file: Relative path of the manifest file on the drive.
    :type manifest_file: str
    :param manifest_hash: Base16-encoded MD5 hash of the manifest file on the drive.
    :type manifest_hash: str
    :param drive_header_hash: The drive header hash value.
    :type drive_header_hash: str
    :param state: The drive's current state. Possible values include: "Specified",
     "Received", "NeverReceived", "Transferring", "Completed", "CompletedMoreInfo",
     "ShippedBack".
    :type state: str or ~storage_import_export.models.DriveState
    :param copy_status: Detailed data-transfer status; only present once the drive
     reaches the Transferring state.
    :type copy_status: str
    :param percent_complete: Percentage completed for the drive.
    :type percent_complete: long
    :param verbose_log_uri: URI of the blob containing the verbose transfer log.
    :type verbose_log_uri: str
    :param error_log_uri: URI of the blob containing the error transfer log.
    :type error_log_uri: str
    :param manifest_uri: URI of the blob containing the drive manifest file.
    :type manifest_uri: str
    :param bytes_succeeded: Bytes successfully transferred for the drive.
    :type bytes_succeeded: long
    """

    _attribute_map = {
        'drive_id': {'key': 'driveId', 'type': 'str'},
        'bit_locker_key': {'key': 'bitLockerKey', 'type': 'str'},
        'manifest_file': {'key': 'manifestFile', 'type': 'str'},
        'manifest_hash': {'key': 'manifestHash', 'type': 'str'},
        'drive_header_hash': {'key': 'driveHeaderHash', 'type': 'str'},
        'state': {'key': 'state', 'type': 'str'},
        'copy_status': {'key': 'copyStatus', 'type': 'str'},
        'percent_complete': {'key': 'percentComplete', 'type': 'long'},
        'verbose_log_uri': {'key': 'verboseLogUri', 'type': 'str'},
        'error_log_uri': {'key': 'errorLogUri', 'type': 'str'},
        'manifest_uri': {'key': 'manifestUri', 'type': 'str'},
        'bytes_succeeded': {'key': 'bytesSucceeded', 'type': 'long'},
    }

    def __init__(self, **kwargs):
        super(DriveStatus, self).__init__(**kwargs)
        self.drive_id = kwargs.get('drive_id')
        self.bit_locker_key = kwargs.get('bit_locker_key')
        self.manifest_file = kwargs.get('manifest_file')
        self.manifest_hash = kwargs.get('manifest_hash')
        self.drive_header_hash = kwargs.get('drive_header_hash')
        self.state = kwargs.get('state')
        self.copy_status = kwargs.get('copy_status')
        self.percent_complete = kwargs.get('percent_complete')
        self.verbose_log_uri = kwargs.get('verbose_log_uri')
        self.error_log_uri = kwargs.get('error_log_uri')
        self.manifest_uri = kwargs.get('manifest_uri')
        self.bytes_succeeded = kwargs.get('bytes_succeeded')


class EncryptionKeyDetails(msrest.serialization.Model):
    """Encryption key (KEK) properties for the job.

    :param kek_type: The type of kek encryption key. Possible values include:
     "MicrosoftManaged", "CustomerManaged". Default value: "MicrosoftManaged".
    :type kek_type: str or ~storage_import_export.models.EncryptionKekType
    :param kek_url: URL of the kek encryption key.
    :type kek_url: str
    :param kek_vault_resource_id: Key Vault resource id of the kek encryption key.
    :type kek_vault_resource_id: str
    """

    _attribute_map = {
        'kek_type': {'key': 'kekType', 'type': 'str'},
        'kek_url': {'key': 'kekUrl', 'type': 'str'},
        'kek_vault_resource_id': {'key': 'kekVaultResourceID', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(EncryptionKeyDetails, self).__init__(**kwargs)
        # Service default is Microsoft-managed keys.
        self.kek_type = kwargs.get('kek_type', "MicrosoftManaged")
        self.kek_url = kwargs.get('kek_url')
        self.kek_vault_resource_id = kwargs.get('kek_vault_resource_id')


class ErrorResponse(msrest.serialization.Model):
    """Error payload returned by the service.

    :param code: Error code.
    :type code: str
    :param message: Error message.
    :type message: str
    :param target: Error target.
    :type target: str
    :param details: Error details, when present.
    :type details: list[~storage_import_export.models.ErrorResponseErrorDetailsItem]
    :param innererror: Inner error object, when present.
    :type innererror: object
    """

    # Keys are nested under a top-level "error" object on the wire.
    _attribute_map = {
        'code': {'key': 'error.code', 'type': 'str'},
        'message': {'key': 'error.message', 'type': 'str'},
        'target': {'key': 'error.target', 'type': 'str'},
        'details': {'key': 'error.details', 'type': '[ErrorResponseErrorDetailsItem]'},
        'innererror': {'key': 'error.innererror', 'type': 'object'},
    }

    def __init__(self, **kwargs):
        super(ErrorResponse, self).__init__(**kwargs)
        self.code = kwargs.get('code')
        self.message = kwargs.get('message')
        self.target = kwargs.get('target')
        self.details = kwargs.get('details')
        self.innererror = kwargs.get('innererror')
class ErrorResponseErrorDetailsItem(msrest.serialization.Model):
    """A single entry in an error response's details list.

    :param code: Error code.
    :type code: str
    :param target: Error target.
    :type target: str
    :param message: Error message.
    :type message: str
    """

    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'target': {'key': 'target', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ErrorResponseErrorDetailsItem, self).__init__(**kwargs)
        self.code = kwargs.get('code')
        self.target = kwargs.get('target')
        self.message = kwargs.get('message')


class Export(msrest.serialization.Model):
    """Blobs to be exported for an export job.

    Required for export jobs; must not be specified for import jobs.

    :param blob_list_blob_path: Relative URI to the block blob listing the blob paths
     or path prefixes, beginning with the container name ($root for the root container).
    :type blob_list_blob_path: str
    :param blob_path: A collection of blob-path strings.
    :type blob_path: list[str]
    :param blob_path_prefix: A collection of blob-prefix strings.
    :type blob_path_prefix: list[str]
    """

    # blob_path / blob_path_prefix are nested under "blobList" on the wire.
    _attribute_map = {
        'blob_list_blob_path': {'key': 'blobListBlobPath', 'type': 'str'},
        'blob_path': {'key': 'blobList.blobPath', 'type': '[str]'},
        'blob_path_prefix': {'key': 'blobList.blobPathPrefix', 'type': '[str]'},
    }

    def __init__(self, **kwargs):
        super(Export, self).__init__(**kwargs)
        self.blob_list_blob_path = kwargs.get('blob_list_blob_path')
        self.blob_path = kwargs.get('blob_path')
        self.blob_path_prefix = kwargs.get('blob_path_prefix')


class GetBitLockerKeysResponse(msrest.serialization.Model):
    """Response of the GetBitLockerKeys operation.

    :param value: Per-drive BitLocker key entries.
    :type value: list[~storage_import_export.models.DriveBitLockerKey]
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[DriveBitLockerKey]'},
    }

    def __init__(self, **kwargs):
        super(GetBitLockerKeysResponse, self).__init__(**kwargs)
        self.value = kwargs.get('value')


class IdentityDetails(msrest.serialization.Model):
    """Identity properties of the job.

    Variables are only populated by the server, and will be ignored when sending a
    request.

    :param type: The type of identity. Possible values include: "None",
     "SystemAssigned", "UserAssigned". Default value: "None".
    :type type: str or ~storage_import_export.models.IdentityType
    :ivar principal_id: Principal id of the job's identity.
    :vartype principal_id: str
    :ivar tenant_id: Tenant id of the job's identity.
    :vartype tenant_id: str
    """

    _validation = {
        'principal_id': {'readonly': True},
        'tenant_id': {'readonly': True},
    }

    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'principal_id': {'key': 'principalId', 'type': 'str'},
        'tenant_id': {'key': 'tenantId', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(IdentityDetails, self).__init__(**kwargs)
        self.type = kwargs.get('type', "None")
        # Server-populated; never sent on requests.
        self.principal_id = None
        self.tenant_id = None


class JobDetails(msrest.serialization.Model):
    """Properties of an import/export job.

    :param storage_account_id: Resource identifier of the storage account the data is
     imported to or exported from.
    :type storage_account_id: str
    :param job_type: The type of job.
    :type job_type: str
    :param return_address: Return address information for the job.
    :type return_address: ~storage_import_export.models.ReturnAddress
    :param return_shipping: Return carrier and the customer's carrier account.
    :type return_shipping: ~storage_import_export.models.ReturnShipping
    :param shipping_information: Microsoft datacenter the drives should be shipped to.
    :type shipping_information: ~storage_import_export.models.ShippingInformation
    :param delivery_package: Package shipped by the customer to the Microsoft data
     center.
    :type delivery_package: ~storage_import_export.models.DeliveryPackageInformation
    :param return_package: Package shipped from the Microsoft data center back to the
     customer; absent until the drives have been returned.
    :type return_package: ~storage_import_export.models.PackageInfomation
    :param diagnostics_path: Virtual blob directory for copy logs and (optional)
     drive-manifest backups.
    :type diagnostics_path: str
    :param log_level: Default value is Error. Whether error or verbose logging is
     enabled.
    :type log_level: str
    :param backup_drive_manifest: Default value is false. Whether drive manifest files
     should be copied to block blobs.
    :type backup_drive_manifest: bool
    :param state: Current state of the job.
    :type state: str
    :param cancel_requested: Whether a cancellation request has been submitted.
    :type cancel_requested: bool
    :param percent_complete: Overall percentage completed for the job.
    :type percent_complete: int
    :param incomplete_blob_list_uri: Blob path of a block blob listing blobs that were
     not exported due to insufficient drive space; absent when all blobs exported.
    :type incomplete_blob_list_uri: str
    :param drive_list: Up to ten drives comprising the job. Required for import jobs;
     not specified for export jobs.
    :type drive_list: list[~storage_import_export.models.DriveStatus]
    :param export: Blobs to be exported; included for export jobs only.
    :type export: ~storage_import_export.models.Export
    :param provisioning_state: Provisioning state of the job.
    :type provisioning_state: str
    :param encryption_key: Encryption key information.
    :type encryption_key: ~storage_import_export.models.EncryptionKeyDetails
    """

    _attribute_map = {
        'storage_account_id': {'key': 'storageAccountId', 'type': 'str'},
        'job_type': {'key': 'jobType', 'type': 'str'},
        'return_address': {'key': 'returnAddress', 'type': 'ReturnAddress'},
        'return_shipping': {'key': 'returnShipping', 'type': 'ReturnShipping'},
        'shipping_information': {'key': 'shippingInformation', 'type': 'ShippingInformation'},
        'delivery_package': {'key': 'deliveryPackage', 'type': 'DeliveryPackageInformation'},
        'return_package': {'key': 'returnPackage', 'type': 'PackageInfomation'},
        'diagnostics_path': {'key': 'diagnosticsPath', 'type': 'str'},
        'log_level': {'key': 'logLevel', 'type': 'str'},
        'backup_drive_manifest': {'key': 'backupDriveManifest', 'type': 'bool'},
        'state': {'key': 'state', 'type': 'str'},
        'cancel_requested': {'key': 'cancelRequested', 'type': 'bool'},
        'percent_complete': {'key': 'percentComplete', 'type': 'int'},
        'incomplete_blob_list_uri': {'key': 'incompleteBlobListUri', 'type': 'str'},
        'drive_list': {'key': 'driveList', 'type': '[DriveStatus]'},
        'export': {'key': 'export', 'type': 'Export'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'encryption_key': {'key': 'encryptionKey', 'type': 'EncryptionKeyDetails'},
    }

    def __init__(self, **kwargs):
        super(JobDetails, self).__init__(**kwargs)
        self.storage_account_id = kwargs.get('storage_account_id')
        self.job_type = kwargs.get('job_type')
        self.return_address = kwargs.get('return_address')
        self.return_shipping = kwargs.get('return_shipping')
        self.shipping_information = kwargs.get('shipping_information')
        self.delivery_package = kwargs.get('delivery_package')
        self.return_package = kwargs.get('return_package')
        self.diagnostics_path = kwargs.get('diagnostics_path')
        self.log_level = kwargs.get('log_level')
        self.backup_drive_manifest = kwargs.get('backup_drive_manifest')
        self.state = kwargs.get('state')
        self.cancel_requested = kwargs.get('cancel_requested')
        self.percent_complete = kwargs.get('percent_complete')
        self.incomplete_blob_list_uri = kwargs.get('incomplete_blob_list_uri')
        self.drive_list = kwargs.get('drive_list')
        self.export = kwargs.get('export')
        self.provisioning_state = kwargs.get('provisioning_state')
        self.encryption_key = kwargs.get('encryption_key')
class JobResponse(msrest.serialization.Model):
    """An import/export job resource.

    Variables are only populated by the server, and will be ignored when sending a
    request.

    :ivar system_data: SystemData of ImportExport Jobs.
    :vartype system_data: ~storage_import_export.models.SystemData
    :ivar id: Resource identifier of the job.
    :vartype id: str
    :ivar name: Name of the job.
    :vartype name: str
    :ivar type: Type of the job resource.
    :vartype type: str
    :param location: Azure location where the job is created.
    :type location: str
    :param tags: A set of tags. Tags assigned to the job.
    :type tags: object
    :param properties: Job properties.
    :type properties: ~storage_import_export.models.JobDetails
    :param identity: Job identity details.
    :type identity: ~storage_import_export.models.IdentityDetails
    """

    _validation = {
        'system_data': {'readonly': True},
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'system_data': {'key': 'systemData', 'type': 'SystemData'},
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': 'object'},
        'properties': {'key': 'properties', 'type': 'JobDetails'},
        'identity': {'key': 'identity', 'type': 'IdentityDetails'},
    }

    def __init__(self, **kwargs):
        super(JobResponse, self).__init__(**kwargs)
        # Server-populated resource metadata; never sent on requests.
        self.system_data = None
        self.id = None
        self.name = None
        self.type = None
        self.location = kwargs.get('location')
        self.tags = kwargs.get('tags')
        self.properties = kwargs.get('properties')
        self.identity = kwargs.get('identity')


class ListJobsResponse(msrest.serialization.Model):
    """One page of a job listing.

    :param next_link: Link to the next batch of jobs.
    :type next_link: str
    :param value: Job list.
    :type value: list[~storage_import_export.models.JobResponse]
    """

    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'value': {'key': 'value', 'type': '[JobResponse]'},
    }

    def __init__(self, **kwargs):
        super(ListJobsResponse, self).__init__(**kwargs)
        self.next_link = kwargs.get('next_link')
        self.value = kwargs.get('value')


class ListOperationsResponse(msrest.serialization.Model):
    """Response of the ListOperations call.

    :param value: Supported operations.
    :type value: list[~storage_import_export.models.Operation]
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[Operation]'},
    }

    def __init__(self, **kwargs):
        super(ListOperationsResponse, self).__init__(**kwargs)
        self.value = kwargs.get('value')
class Location(msrest.serialization.Model):
    """An Azure data center location supported by the service.

    :param id: Resource identifier of the location.
    :type id: str
    :param name: Name of the location; use List Locations to get all supported
     locations.
    :type name: str
    :param type: Type of the location.
    :type type: str
    :param recipient_name: Recipient name to use when shipping drives to this data
     center.
    :type recipient_name: str
    :param street_address1: First line of the shipping street address.
    :type street_address1: str
    :param street_address2: Second line of the shipping street address.
    :type street_address2: str
    :param city: City to use when shipping drives to this data center.
    :type city: str
    :param state_or_province: State or province for shipping.
    :type state_or_province: str
    :param postal_code: Postal code for shipping.
    :type postal_code: str
    :param country_or_region: Country or region for shipping.
    :type country_or_region: str
    :param phone: Phone number for the Azure data center.
    :type phone: str
    :param additional_shipping_information: Additional, datacenter-specific shipping
     information for the customer.
    :type additional_shipping_information: str
    :param supported_carriers: Carriers supported at this location.
    :type supported_carriers: list[str]
    :param alternate_locations: Location IDs drives should be shipped to for jobs
     created against this location; includes this location when it is active, and may
     list others when it is temporarily closed for maintenance.
    :type alternate_locations: list[str]
    """

    # Address and carrier fields live under "properties" on the wire.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'recipient_name': {'key': 'properties.recipientName', 'type': 'str'},
        'street_address1': {'key': 'properties.streetAddress1', 'type': 'str'},
        'street_address2': {'key': 'properties.streetAddress2', 'type': 'str'},
        'city': {'key': 'properties.city', 'type': 'str'},
        'state_or_province': {'key': 'properties.stateOrProvince', 'type': 'str'},
        'postal_code': {'key': 'properties.postalCode', 'type': 'str'},
        'country_or_region': {'key': 'properties.countryOrRegion', 'type': 'str'},
        'phone': {'key': 'properties.phone', 'type': 'str'},
        'additional_shipping_information': {'key': 'properties.additionalShippingInformation', 'type': 'str'},
        'supported_carriers': {'key': 'properties.supportedCarriers', 'type': '[str]'},
        'alternate_locations': {'key': 'properties.alternateLocations', 'type': '[str]'},
    }

    def __init__(self, **kwargs):
        super(Location, self).__init__(**kwargs)
        self.id = kwargs.get('id')
        self.name = kwargs.get('name')
        self.type = kwargs.get('type')
        self.recipient_name = kwargs.get('recipient_name')
        self.street_address1 = kwargs.get('street_address1')
        self.street_address2 = kwargs.get('street_address2')
        self.city = kwargs.get('city')
        self.state_or_province = kwargs.get('state_or_province')
        self.postal_code = kwargs.get('postal_code')
        self.country_or_region = kwargs.get('country_or_region')
        self.phone = kwargs.get('phone')
        self.additional_shipping_information = kwargs.get('additional_shipping_information')
        self.supported_carriers = kwargs.get('supported_carriers')
        self.alternate_locations = kwargs.get('alternate_locations')


class LocationsResponse(msrest.serialization.Model):
    """Response of the ListLocations call.

    :param value: Supported locations.
    :type value: list[~storage_import_export.models.Location]
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[Location]'},
    }

    def __init__(self, **kwargs):
        super(LocationsResponse, self).__init__(**kwargs)
        self.value = kwargs.get('value')


class Operation(msrest.serialization.Model):
    """An operation supported by the Storage Import/Export job API.

    All required parameters must be populated in order to send to Azure.

    :param name: Required. Name of the operation.
    :type name: str
    :param provider: Resource provider name the operation belongs to.
    :type provider: str
    :param resource: Name of the resource the operation belongs to.
    :type resource: str
    :param operation: Display name of the operation.
    :type operation: str
    :param description: Short description of the operation.
    :type description: str
    """

    _validation = {
        'name': {'required': True},
    }

    # Display fields are nested under "display" on the wire.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'provider': {'key': 'display.provider', 'type': 'str'},
        'resource': {'key': 'display.resource', 'type': 'str'},
        'operation': {'key': 'display.operation', 'type': 'str'},
        'description': {'key': 'display.description', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(Operation, self).__init__(**kwargs)
        self.name = kwargs['name']
        self.provider = kwargs.get('provider')
        self.resource = kwargs.get('resource')
        self.operation = kwargs.get('operation')
        self.description = kwargs.get('description')
class PackageInfomation(msrest.serialization.Model):
    """Describes the package shipped by the customer to the Microsoft data center.

    NOTE: the class name carries a historical spelling ("Infomation") that is part of
    the generated public API and must not be corrected here.

    All required parameters must be populated in order to send to Azure.

    :param carrier_name: Required. Name of the carrier shipping the import/export
     drives.
    :type carrier_name: str
    :param tracking_number: Required. Tracking number of the package.
    :type tracking_number: str
    :param drive_count: Required. Number of drives included in the package.
    :type drive_count: int
    :param ship_date: Required. Date the package was shipped.
    :type ship_date: str
    """

    _validation = {
        'carrier_name': {'required': True},
        'tracking_number': {'required': True},
        'drive_count': {'required': True},
        'ship_date': {'required': True},
    }

    _attribute_map = {
        'carrier_name': {'key': 'carrierName', 'type': 'str'},
        'tracking_number': {'key': 'trackingNumber', 'type': 'str'},
        'drive_count': {'key': 'driveCount', 'type': 'int'},
        'ship_date': {'key': 'shipDate', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(PackageInfomation, self).__init__(**kwargs)
        # All four fields are required; missing keys raise KeyError.
        self.carrier_name = kwargs['carrier_name']
        self.tracking_number = kwargs['tracking_number']
        self.drive_count = kwargs['drive_count']
        self.ship_date = kwargs['ship_date']


class PutJobParameters(msrest.serialization.Model):
    """Parameters of a Put Job request.

    :param location: Supported Azure location where the job should be created.
    :type location: str
    :param tags: A set of tags. Tags that will be assigned to the job.
    :type tags: object
    :param properties: Job properties.
    :type properties: ~storage_import_export.models.JobDetails
    """

    _attribute_map = {
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': 'object'},
        'properties': {'key': 'properties', 'type': 'JobDetails'},
    }

    def __init__(self, **kwargs):
        super(PutJobParameters, self).__init__(**kwargs)
        self.location = kwargs.get('location')
        self.tags = kwargs.get('tags')
        self.properties = kwargs.get('properties')


class ReturnAddress(msrest.serialization.Model):
    """Return address information for the job.

    All required parameters must be populated in order to send to Azure.

    :param recipient_name: Required. Name of the recipient of the returned drives.
    :type recipient_name: str
    :param street_address1: Required. First line of the return street address.
    :type street_address1: str
    :param street_address2: Second line of the return street address.
    :type street_address2: str
    :param city: Required. City to use when returning the drives.
    :type city: str
    :param state_or_province: State or province to use when returning the drives.
    :type state_or_province: str
    :param postal_code: Required. Postal code to use when returning the drives.
    :type postal_code: str
    :param country_or_region: Required. Country or region to use when returning the
     drives.
    :type country_or_region: str
    :param phone: Required. Phone number of the recipient of the returned drives.
    :type phone: str
    :param email: Required. Email address of the recipient of the returned drives.
    :type email: str
    """

    _validation = {
        'recipient_name': {'required': True},
        'street_address1': {'required': True},
        'city': {'required': True},
        'postal_code': {'required': True},
        'country_or_region': {'required': True},
        'phone': {'required': True},
        'email': {'required': True},
    }

    _attribute_map = {
        'recipient_name': {'key': 'recipientName', 'type': 'str'},
        'street_address1': {'key': 'streetAddress1', 'type': 'str'},
        'street_address2': {'key': 'streetAddress2', 'type': 'str'},
        'city': {'key': 'city', 'type': 'str'},
        'state_or_province': {'key': 'stateOrProvince', 'type': 'str'},
        'postal_code': {'key': 'postalCode', 'type': 'str'},
        'country_or_region': {'key': 'countryOrRegion', 'type': 'str'},
        'phone': {'key': 'phone', 'type': 'str'},
        'email': {'key': 'email', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ReturnAddress, self).__init__(**kwargs)
        self.recipient_name = kwargs['recipient_name']
        self.street_address1 = kwargs['street_address1']
        self.street_address2 = kwargs.get('street_address2')
        self.city = kwargs['city']
        self.state_or_province = kwargs.get('state_or_province')
        self.postal_code = kwargs['postal_code']
        self.country_or_region = kwargs['country_or_region']
        self.phone = kwargs['phone']
        self.email = kwargs['email']


class ReturnShipping(msrest.serialization.Model):
    """Return carrier and the customer's account with the carrier.

    All required parameters must be populated in order to send to Azure.

    :param carrier_name: Required. The carrier's name.
    :type carrier_name: str
    :param carrier_account_number: Required. The customer's account number with the
     carrier.
    :type carrier_account_number: str
    """

    _validation = {
        'carrier_name': {'required': True},
        'carrier_account_number': {'required': True},
    }

    _attribute_map = {
        'carrier_name': {'key': 'carrierName', 'type': 'str'},
        'carrier_account_number': {'key': 'carrierAccountNumber', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ReturnShipping, self).__init__(**kwargs)
        self.carrier_name = kwargs['carrier_name']
        self.carrier_account_number = kwargs['carrier_account_number']
class ShippingInformation(msrest.serialization.Model):
    """Microsoft datacenter address the drives should be shipped to.

    Variables are only populated by the server, and will be ignored when sending a
    request.

    :param recipient_name: Name of the recipient of the returned drives.
    :type recipient_name: str
    :param street_address1: First line of the return street address.
    :type street_address1: str
    :param street_address2: Second line of the return street address.
    :type street_address2: str
    :param city: City to use when returning the drives.
    :type city: str
    :param state_or_province: State or province to use when returning the drives.
    :type state_or_province: str
    :param postal_code: Postal code to use when returning the drives.
    :type postal_code: str
    :param country_or_region: Country or region to use when returning the drives.
    :type country_or_region: str
    :param phone: Phone number of the recipient of the returned drives.
    :type phone: str
    :ivar additional_information: Additional, datacenter-specific shipping information
     for the customer.
    :vartype additional_information: str
    """

    _validation = {
        'additional_information': {'readonly': True},
    }

    _attribute_map = {
        'recipient_name': {'key': 'recipientName', 'type': 'str'},
        'street_address1': {'key': 'streetAddress1', 'type': 'str'},
        'street_address2': {'key': 'streetAddress2', 'type': 'str'},
        'city': {'key': 'city', 'type': 'str'},
        'state_or_province': {'key': 'stateOrProvince', 'type': 'str'},
        'postal_code': {'key': 'postalCode', 'type': 'str'},
        'country_or_region': {'key': 'countryOrRegion', 'type': 'str'},
        'phone': {'key': 'phone', 'type': 'str'},
        'additional_information': {'key': 'additionalInformation', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ShippingInformation, self).__init__(**kwargs)
        self.recipient_name = kwargs.get('recipient_name')
        self.street_address1 = kwargs.get('street_address1')
        self.street_address2 = kwargs.get('street_address2')
        self.city = kwargs.get('city')
        self.state_or_province = kwargs.get('state_or_province')
        self.postal_code = kwargs.get('postal_code')
        self.country_or_region = kwargs.get('country_or_region')
        self.phone = kwargs.get('phone')
        # Server-populated; never sent on requests.
        self.additional_information = None


class SystemData(msrest.serialization.Model):
    """Metadata pertaining to creation and last modification of the resource.

    :param created_by: Identity that created the resource.
    :type created_by: str
    :param created_by_type: Type of identity that created the resource. Possible
     values include: "User", "Application", "ManagedIdentity", "Key".
    :type created_by_type: str or ~storage_import_export.models.CreatedByType
    :param created_at: Timestamp of resource creation (UTC).
    :type created_at: ~datetime.datetime
    :param last_modified_by: Identity that last modified the resource.
    :type last_modified_by: str
    :param last_modified_by_type: Type of identity that last modified the resource.
     Possible values include: "User", "Application", "ManagedIdentity", "Key".
    :type last_modified_by_type: str or ~storage_import_export.models.CreatedByType
    :param last_modified_at: Timestamp of last resource modification (UTC).
    :type last_modified_at: ~datetime.datetime
    """

    _attribute_map = {
        'created_by': {'key': 'createdBy', 'type': 'str'},
        'created_by_type': {'key': 'createdByType', 'type': 'str'},
        'created_at': {'key': 'createdAt', 'type': 'iso-8601'},
        'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'},
        'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'},
        'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'},
    }

    def __init__(self, **kwargs):
        super(SystemData, self).__init__(**kwargs)
        self.created_by = kwargs.get('created_by')
        self.created_by_type = kwargs.get('created_by_type')
        self.created_at = kwargs.get('created_at')
        self.last_modified_by = kwargs.get('last_modified_by')
        self.last_modified_by_type = kwargs.get('last_modified_by_type')
        self.last_modified_at = kwargs.get('last_modified_at')
The ReturnAddress and DeliveryPackage properties + must have been set either in this request or in a previous request, otherwise the request will + fail. + :type state: str + :param return_address: Specifies the return address information for the job. + :type return_address: ~storage_import_export.models.ReturnAddress + :param return_shipping: Specifies the return carrier and customer's account with the carrier. + :type return_shipping: ~storage_import_export.models.ReturnShipping + :param delivery_package: Contains information about the package being shipped by the customer + to the Microsoft data center. + :type delivery_package: ~storage_import_export.models.DeliveryPackageInformation + :param log_level: Indicates whether error logging or verbose logging is enabled. + :type log_level: str + :param backup_drive_manifest: Indicates whether the manifest files on the drives should be + copied to block blobs. + :type backup_drive_manifest: bool + :param drive_list: List of drives that comprise the job. 
+ :type drive_list: list[~storage_import_export.models.DriveStatus] + """ + + _attribute_map = { + 'tags': {'key': 'tags', 'type': 'object'}, + 'cancel_requested': {'key': 'properties.cancelRequested', 'type': 'bool'}, + 'state': {'key': 'properties.state', 'type': 'str'}, + 'return_address': {'key': 'properties.returnAddress', 'type': 'ReturnAddress'}, + 'return_shipping': {'key': 'properties.returnShipping', 'type': 'ReturnShipping'}, + 'delivery_package': {'key': 'properties.deliveryPackage', 'type': 'DeliveryPackageInformation'}, + 'log_level': {'key': 'properties.logLevel', 'type': 'str'}, + 'backup_drive_manifest': {'key': 'properties.backupDriveManifest', 'type': 'bool'}, + 'drive_list': {'key': 'properties.driveList', 'type': '[DriveStatus]'}, + } + + def __init__( + self, + **kwargs + ): + super(UpdateJobParameters, self).__init__(**kwargs) + self.tags = kwargs.get('tags', None) + self.cancel_requested = kwargs.get('cancel_requested', None) + self.state = kwargs.get('state', None) + self.return_address = kwargs.get('return_address', None) + self.return_shipping = kwargs.get('return_shipping', None) + self.delivery_package = kwargs.get('delivery_package', None) + self.log_level = kwargs.get('log_level', None) + self.backup_drive_manifest = kwargs.get('backup_drive_manifest', None) + self.drive_list = kwargs.get('drive_list', None) diff --git a/src/storageimportexport/azext_storageimportexport/vendored_sdks/storageimportexport/models/_models_py3.py b/src/storageimportexport/azext_storageimportexport/vendored_sdks/storageimportexport/models/_models_py3.py new file mode 100644 index 00000000000..18704f88ea5 --- /dev/null +++ b/src/storageimportexport/azext_storageimportexport/vendored_sdks/storageimportexport/models/_models_py3.py @@ -0,0 +1,1118 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +import datetime +from typing import List, Optional, Union + +from azure.core.exceptions import HttpResponseError +import msrest.serialization + +from ._storage_import_export_enums import * + + +class DeliveryPackageInformation(msrest.serialization.Model): + """Contains information about the delivery package being shipped by the customer to the Microsoft data center. + + All required parameters must be populated in order to send to Azure. + + :param carrier_name: Required. The name of the carrier that is used to ship the import or + export drives. + :type carrier_name: str + :param tracking_number: Required. The tracking number of the package. + :type tracking_number: str + :param drive_count: The number of drives included in the package. + :type drive_count: long + :param ship_date: The date when the package is shipped. + :type ship_date: str + """ + + _validation = { + 'carrier_name': {'required': True}, + 'tracking_number': {'required': True}, + } + + _attribute_map = { + 'carrier_name': {'key': 'carrierName', 'type': 'str'}, + 'tracking_number': {'key': 'trackingNumber', 'type': 'str'}, + 'drive_count': {'key': 'driveCount', 'type': 'long'}, + 'ship_date': {'key': 'shipDate', 'type': 'str'}, + } + + def __init__( + self, + *, + carrier_name: str, + tracking_number: str, + drive_count: Optional[int] = None, + ship_date: Optional[str] = None, + **kwargs + ): + super(DeliveryPackageInformation, self).__init__(**kwargs) + self.carrier_name = carrier_name + self.tracking_number = tracking_number + self.drive_count = drive_count + self.ship_date = ship_date + + +class DriveBitLockerKey(msrest.serialization.Model): + """BitLocker recovery key or password to the specified drive. 
+ + :param bit_locker_key: BitLocker recovery key or password. + :type bit_locker_key: str + :param drive_id: Drive ID. + :type drive_id: str + """ + + _attribute_map = { + 'bit_locker_key': {'key': 'bitLockerKey', 'type': 'str'}, + 'drive_id': {'key': 'driveId', 'type': 'str'}, + } + + def __init__( + self, + *, + bit_locker_key: Optional[str] = None, + drive_id: Optional[str] = None, + **kwargs + ): + super(DriveBitLockerKey, self).__init__(**kwargs) + self.bit_locker_key = bit_locker_key + self.drive_id = drive_id + + +class DriveStatus(msrest.serialization.Model): + """Provides information about the drive's status. + + :param drive_id: The drive's hardware serial number, without spaces. + :type drive_id: str + :param bit_locker_key: The BitLocker key used to encrypt the drive. + :type bit_locker_key: str + :param manifest_file: The relative path of the manifest file on the drive. + :type manifest_file: str + :param manifest_hash: The Base16-encoded MD5 hash of the manifest file on the drive. + :type manifest_hash: str + :param drive_header_hash: The drive header hash value. + :type drive_header_hash: str + :param state: The drive's current state. Possible values include: "Specified", "Received", + "NeverReceived", "Transferring", "Completed", "CompletedMoreInfo", "ShippedBack". + :type state: str or ~storage_import_export.models.DriveState + :param copy_status: Detailed status about the data transfer process. This field is not returned + in the response until the drive is in the Transferring state. + :type copy_status: str + :param percent_complete: Percentage completed for the drive. + :type percent_complete: long + :param verbose_log_uri: A URI that points to the blob containing the verbose log for the data + transfer operation. + :type verbose_log_uri: str + :param error_log_uri: A URI that points to the blob containing the error log for the data + transfer operation. 
+ :type error_log_uri: str + :param manifest_uri: A URI that points to the blob containing the drive manifest file. + :type manifest_uri: str + :param bytes_succeeded: Bytes successfully transferred for the drive. + :type bytes_succeeded: long + """ + + _attribute_map = { + 'drive_id': {'key': 'driveId', 'type': 'str'}, + 'bit_locker_key': {'key': 'bitLockerKey', 'type': 'str'}, + 'manifest_file': {'key': 'manifestFile', 'type': 'str'}, + 'manifest_hash': {'key': 'manifestHash', 'type': 'str'}, + 'drive_header_hash': {'key': 'driveHeaderHash', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + 'copy_status': {'key': 'copyStatus', 'type': 'str'}, + 'percent_complete': {'key': 'percentComplete', 'type': 'long'}, + 'verbose_log_uri': {'key': 'verboseLogUri', 'type': 'str'}, + 'error_log_uri': {'key': 'errorLogUri', 'type': 'str'}, + 'manifest_uri': {'key': 'manifestUri', 'type': 'str'}, + 'bytes_succeeded': {'key': 'bytesSucceeded', 'type': 'long'}, + } + + def __init__( + self, + *, + drive_id: Optional[str] = None, + bit_locker_key: Optional[str] = None, + manifest_file: Optional[str] = None, + manifest_hash: Optional[str] = None, + drive_header_hash: Optional[str] = None, + state: Optional[Union[str, "DriveState"]] = None, + copy_status: Optional[str] = None, + percent_complete: Optional[int] = None, + verbose_log_uri: Optional[str] = None, + error_log_uri: Optional[str] = None, + manifest_uri: Optional[str] = None, + bytes_succeeded: Optional[int] = None, + **kwargs + ): + super(DriveStatus, self).__init__(**kwargs) + self.drive_id = drive_id + self.bit_locker_key = bit_locker_key + self.manifest_file = manifest_file + self.manifest_hash = manifest_hash + self.drive_header_hash = drive_header_hash + self.state = state + self.copy_status = copy_status + self.percent_complete = percent_complete + self.verbose_log_uri = verbose_log_uri + self.error_log_uri = error_log_uri + self.manifest_uri = manifest_uri + self.bytes_succeeded = bytes_succeeded + + 
+class EncryptionKeyDetails(msrest.serialization.Model): + """Specifies the encryption key properties. + + :param kek_type: The type of kek encryption key. Possible values include: "MicrosoftManaged", + "CustomerManaged". Default value: "MicrosoftManaged". + :type kek_type: str or ~storage_import_export.models.EncryptionKekType + :param kek_url: Specifies the url for kek encryption key. + :type kek_url: str + :param kek_vault_resource_id: Specifies the keyvault resource id for kek encryption key. + :type kek_vault_resource_id: str + """ + + _attribute_map = { + 'kek_type': {'key': 'kekType', 'type': 'str'}, + 'kek_url': {'key': 'kekUrl', 'type': 'str'}, + 'kek_vault_resource_id': {'key': 'kekVaultResourceID', 'type': 'str'}, + } + + def __init__( + self, + *, + kek_type: Optional[Union[str, "EncryptionKekType"]] = "MicrosoftManaged", + kek_url: Optional[str] = None, + kek_vault_resource_id: Optional[str] = None, + **kwargs + ): + super(EncryptionKeyDetails, self).__init__(**kwargs) + self.kek_type = kek_type + self.kek_url = kek_url + self.kek_vault_resource_id = kek_vault_resource_id + + +class ErrorResponse(msrest.serialization.Model): + """Response when errors occurred. + + :param code: Provides information about the error code. + :type code: str + :param message: Provides information about the error message. + :type message: str + :param target: Provides information about the error target. + :type target: str + :param details: Describes the error details if present. + :type details: list[~storage_import_export.models.ErrorResponseErrorDetailsItem] + :param innererror: Inner error object if present. 
+ :type innererror: object + """ + + _attribute_map = { + 'code': {'key': 'error.code', 'type': 'str'}, + 'message': {'key': 'error.message', 'type': 'str'}, + 'target': {'key': 'error.target', 'type': 'str'}, + 'details': {'key': 'error.details', 'type': '[ErrorResponseErrorDetailsItem]'}, + 'innererror': {'key': 'error.innererror', 'type': 'object'}, + } + + def __init__( + self, + *, + code: Optional[str] = None, + message: Optional[str] = None, + target: Optional[str] = None, + details: Optional[List["ErrorResponseErrorDetailsItem"]] = None, + innererror: Optional[object] = None, + **kwargs + ): + super(ErrorResponse, self).__init__(**kwargs) + self.code = code + self.message = message + self.target = target + self.details = details + self.innererror = innererror + + +class ErrorResponseErrorDetailsItem(msrest.serialization.Model): + """ErrorResponseErrorDetailsItem. + + :param code: Provides information about the error code. + :type code: str + :param target: Provides information about the error target. + :type target: str + :param message: Provides information about the error message. + :type message: str + """ + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__( + self, + *, + code: Optional[str] = None, + target: Optional[str] = None, + message: Optional[str] = None, + **kwargs + ): + super(ErrorResponseErrorDetailsItem, self).__init__(**kwargs) + self.code = code + self.target = target + self.message = message + + +class Export(msrest.serialization.Model): + """A property containing information about the blobs to be exported for an export job. This property is required for export jobs, but must not be specified for import jobs. + + :param blob_list_blob_path: The relative URI to the block blob that contains the list of blob + paths or blob path prefixes as defined above, beginning with the container name. 
If the blob is + in root container, the URI must begin with $root. + :type blob_list_blob_path: str + :param blob_path: A collection of blob-path strings. + :type blob_path: list[str] + :param blob_path_prefix: A collection of blob-prefix strings. + :type blob_path_prefix: list[str] + """ + + _attribute_map = { + 'blob_list_blob_path': {'key': 'blobListBlobPath', 'type': 'str'}, + 'blob_path': {'key': 'blobList.blobPath', 'type': '[str]'}, + 'blob_path_prefix': {'key': 'blobList.blobPathPrefix', 'type': '[str]'}, + } + + def __init__( + self, + *, + blob_list_blob_path: Optional[str] = None, + blob_path: Optional[List[str]] = None, + blob_path_prefix: Optional[List[str]] = None, + **kwargs + ): + super(Export, self).__init__(**kwargs) + self.blob_list_blob_path = blob_list_blob_path + self.blob_path = blob_path + self.blob_path_prefix = blob_path_prefix + + +class GetBitLockerKeysResponse(msrest.serialization.Model): + """GetBitLockerKeys response. + + :param value: drive status. + :type value: list[~storage_import_export.models.DriveBitLockerKey] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[DriveBitLockerKey]'}, + } + + def __init__( + self, + *, + value: Optional[List["DriveBitLockerKey"]] = None, + **kwargs + ): + super(GetBitLockerKeysResponse, self).__init__(**kwargs) + self.value = value + + +class IdentityDetails(msrest.serialization.Model): + """Specifies the identity properties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param type: The type of identity. Possible values include: "None", "SystemAssigned", + "UserAssigned". Default value: "None". + :type type: str or ~storage_import_export.models.IdentityType + :ivar principal_id: Specifies the principal id for the identity for the job. + :vartype principal_id: str + :ivar tenant_id: Specifies the tenant id for the identity for the job. 
+ :vartype tenant_id: str + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + } + + def __init__( + self, + *, + type: Optional[Union[str, "IdentityType"]] = "None", + **kwargs + ): + super(IdentityDetails, self).__init__(**kwargs) + self.type = type + self.principal_id = None + self.tenant_id = None + + +class JobDetails(msrest.serialization.Model): + """Specifies the job properties. + + :param storage_account_id: The resource identifier of the storage account where data will be + imported to or exported from. + :type storage_account_id: str + :param job_type: The type of job. + :type job_type: str + :param return_address: Specifies the return address information for the job. + :type return_address: ~storage_import_export.models.ReturnAddress + :param return_shipping: Specifies the return carrier and customer's account with the carrier. + :type return_shipping: ~storage_import_export.models.ReturnShipping + :param shipping_information: Contains information about the Microsoft datacenter to which the + drives should be shipped. + :type shipping_information: ~storage_import_export.models.ShippingInformation + :param delivery_package: Contains information about the package being shipped by the customer + to the Microsoft data center. + :type delivery_package: ~storage_import_export.models.DeliveryPackageInformation + :param return_package: Contains information about the package being shipped from the Microsoft + data center to the customer to return the drives. The format is the same as the deliveryPackage + property above. This property is not included if the drives have not yet been returned. 
+ :type return_package: ~storage_import_export.models.PackageInfomation + :param diagnostics_path: The virtual blob directory to which the copy logs and backups of drive + manifest files (if enabled) will be stored. + :type diagnostics_path: str + :param log_level: Default value is Error. Indicates whether error logging or verbose logging + will be enabled. + :type log_level: str + :param backup_drive_manifest: Default value is false. Indicates whether the manifest files on + the drives should be copied to block blobs. + :type backup_drive_manifest: bool + :param state: Current state of the job. + :type state: str + :param cancel_requested: Indicates whether a request has been submitted to cancel the job. + :type cancel_requested: bool + :param percent_complete: Overall percentage completed for the job. + :type percent_complete: int + :param incomplete_blob_list_uri: A blob path that points to a block blob containing a list of + blob names that were not exported due to insufficient drive space. If all blobs were exported + successfully, then this element is not included in the response. + :type incomplete_blob_list_uri: str + :param drive_list: List of up to ten drives that comprise the job. The drive list is a required + element for an import job; it is not specified for export jobs. + :type drive_list: list[~storage_import_export.models.DriveStatus] + :param export: A property containing information about the blobs to be exported for an export + job. This property is included for export jobs only. + :type export: ~storage_import_export.models.Export + :param provisioning_state: Specifies the provisioning state of the job. + :type provisioning_state: str + :param encryption_key: Contains information about the encryption key. 
+ :type encryption_key: ~storage_import_export.models.EncryptionKeyDetails + """ + + _attribute_map = { + 'storage_account_id': {'key': 'storageAccountId', 'type': 'str'}, + 'job_type': {'key': 'jobType', 'type': 'str'}, + 'return_address': {'key': 'returnAddress', 'type': 'ReturnAddress'}, + 'return_shipping': {'key': 'returnShipping', 'type': 'ReturnShipping'}, + 'shipping_information': {'key': 'shippingInformation', 'type': 'ShippingInformation'}, + 'delivery_package': {'key': 'deliveryPackage', 'type': 'DeliveryPackageInformation'}, + 'return_package': {'key': 'returnPackage', 'type': 'PackageInfomation'}, + 'diagnostics_path': {'key': 'diagnosticsPath', 'type': 'str'}, + 'log_level': {'key': 'logLevel', 'type': 'str'}, + 'backup_drive_manifest': {'key': 'backupDriveManifest', 'type': 'bool'}, + 'state': {'key': 'state', 'type': 'str'}, + 'cancel_requested': {'key': 'cancelRequested', 'type': 'bool'}, + 'percent_complete': {'key': 'percentComplete', 'type': 'int'}, + 'incomplete_blob_list_uri': {'key': 'incompleteBlobListUri', 'type': 'str'}, + 'drive_list': {'key': 'driveList', 'type': '[DriveStatus]'}, + 'export': {'key': 'export', 'type': 'Export'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'encryption_key': {'key': 'encryptionKey', 'type': 'EncryptionKeyDetails'}, + } + + def __init__( + self, + *, + storage_account_id: Optional[str] = None, + job_type: Optional[str] = None, + return_address: Optional["ReturnAddress"] = None, + return_shipping: Optional["ReturnShipping"] = None, + shipping_information: Optional["ShippingInformation"] = None, + delivery_package: Optional["DeliveryPackageInformation"] = None, + return_package: Optional["PackageInfomation"] = None, + diagnostics_path: Optional[str] = None, + log_level: Optional[str] = None, + backup_drive_manifest: Optional[bool] = None, + state: Optional[str] = None, + cancel_requested: Optional[bool] = None, + percent_complete: Optional[int] = None, + incomplete_blob_list_uri: 
Optional[str] = None, + drive_list: Optional[List["DriveStatus"]] = None, + export: Optional["Export"] = None, + provisioning_state: Optional[str] = None, + encryption_key: Optional["EncryptionKeyDetails"] = None, + **kwargs + ): + super(JobDetails, self).__init__(**kwargs) + self.storage_account_id = storage_account_id + self.job_type = job_type + self.return_address = return_address + self.return_shipping = return_shipping + self.shipping_information = shipping_information + self.delivery_package = delivery_package + self.return_package = return_package + self.diagnostics_path = diagnostics_path + self.log_level = log_level + self.backup_drive_manifest = backup_drive_manifest + self.state = state + self.cancel_requested = cancel_requested + self.percent_complete = percent_complete + self.incomplete_blob_list_uri = incomplete_blob_list_uri + self.drive_list = drive_list + self.export = export + self.provisioning_state = provisioning_state + self.encryption_key = encryption_key + + +class JobResponse(msrest.serialization.Model): + """Contains the job information. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar system_data: SystemData of ImportExport Jobs. + :vartype system_data: ~storage_import_export.models.SystemData + :ivar id: Specifies the resource identifier of the job. + :vartype id: str + :ivar name: Specifies the name of the job. + :vartype name: str + :ivar type: Specifies the type of the job resource. + :vartype type: str + :param location: Specifies the Azure location where the job is created. + :type location: str + :param tags: A set of tags. Specifies the tags that are assigned to the job. + :type tags: object + :param properties: Specifies the job properties. + :type properties: ~storage_import_export.models.JobDetails + :param identity: Specifies the job identity details. 
+ :type identity: ~storage_import_export.models.IdentityDetails + """ + + _validation = { + 'system_data': {'readonly': True}, + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': 'object'}, + 'properties': {'key': 'properties', 'type': 'JobDetails'}, + 'identity': {'key': 'identity', 'type': 'IdentityDetails'}, + } + + def __init__( + self, + *, + location: Optional[str] = None, + tags: Optional[object] = None, + properties: Optional["JobDetails"] = None, + identity: Optional["IdentityDetails"] = None, + **kwargs + ): + super(JobResponse, self).__init__(**kwargs) + self.system_data = None + self.id = None + self.name = None + self.type = None + self.location = location + self.tags = tags + self.properties = properties + self.identity = identity + + +class ListJobsResponse(msrest.serialization.Model): + """List jobs response. + + :param next_link: link to next batch of jobs. + :type next_link: str + :param value: Job list. + :type value: list[~storage_import_export.models.JobResponse] + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[JobResponse]'}, + } + + def __init__( + self, + *, + next_link: Optional[str] = None, + value: Optional[List["JobResponse"]] = None, + **kwargs + ): + super(ListJobsResponse, self).__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class ListOperationsResponse(msrest.serialization.Model): + """List operations response. + + :param value: operations. 
+ :type value: list[~storage_import_export.models.Operation] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Operation]'}, + } + + def __init__( + self, + *, + value: Optional[List["Operation"]] = None, + **kwargs + ): + super(ListOperationsResponse, self).__init__(**kwargs) + self.value = value + + +class Location(msrest.serialization.Model): + """Provides information about an Azure data center location. + + :param id: Specifies the resource identifier of the location. + :type id: str + :param name: Specifies the name of the location. Use List Locations to get all supported + locations. + :type name: str + :param type: Specifies the type of the location. + :type type: str + :param recipient_name: The recipient name to use when shipping the drives to the Azure data + center. + :type recipient_name: str + :param street_address1: The first line of the street address to use when shipping the drives to + the Azure data center. + :type street_address1: str + :param street_address2: The second line of the street address to use when shipping the drives + to the Azure data center. + :type street_address2: str + :param city: The city name to use when shipping the drives to the Azure data center. + :type city: str + :param state_or_province: The state or province to use when shipping the drives to the Azure + data center. + :type state_or_province: str + :param postal_code: The postal code to use when shipping the drives to the Azure data center. + :type postal_code: str + :param country_or_region: The country or region to use when shipping the drives to the Azure + data center. + :type country_or_region: str + :param phone: The phone number for the Azure data center. + :type phone: str + :param additional_shipping_information: Additional shipping information for customer, specific + to datacenter to which customer should send their disks. 
+ :type additional_shipping_information: str + :param supported_carriers: A list of carriers that are supported at this location. + :type supported_carriers: list[str] + :param alternate_locations: A list of location IDs that should be used to ship shipping drives + to for jobs created against the current location. If the current location is active, it will be + part of the list. If it is temporarily closed due to maintenance, this list may contain other + locations. + :type alternate_locations: list[str] + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'recipient_name': {'key': 'properties.recipientName', 'type': 'str'}, + 'street_address1': {'key': 'properties.streetAddress1', 'type': 'str'}, + 'street_address2': {'key': 'properties.streetAddress2', 'type': 'str'}, + 'city': {'key': 'properties.city', 'type': 'str'}, + 'state_or_province': {'key': 'properties.stateOrProvince', 'type': 'str'}, + 'postal_code': {'key': 'properties.postalCode', 'type': 'str'}, + 'country_or_region': {'key': 'properties.countryOrRegion', 'type': 'str'}, + 'phone': {'key': 'properties.phone', 'type': 'str'}, + 'additional_shipping_information': {'key': 'properties.additionalShippingInformation', 'type': 'str'}, + 'supported_carriers': {'key': 'properties.supportedCarriers', 'type': '[str]'}, + 'alternate_locations': {'key': 'properties.alternateLocations', 'type': '[str]'}, + } + + def __init__( + self, + *, + id: Optional[str] = None, + name: Optional[str] = None, + type: Optional[str] = None, + recipient_name: Optional[str] = None, + street_address1: Optional[str] = None, + street_address2: Optional[str] = None, + city: Optional[str] = None, + state_or_province: Optional[str] = None, + postal_code: Optional[str] = None, + country_or_region: Optional[str] = None, + phone: Optional[str] = None, + additional_shipping_information: Optional[str] = None, + supported_carriers: 
Optional[List[str]] = None, + alternate_locations: Optional[List[str]] = None, + **kwargs + ): + super(Location, self).__init__(**kwargs) + self.id = id + self.name = name + self.type = type + self.recipient_name = recipient_name + self.street_address1 = street_address1 + self.street_address2 = street_address2 + self.city = city + self.state_or_province = state_or_province + self.postal_code = postal_code + self.country_or_region = country_or_region + self.phone = phone + self.additional_shipping_information = additional_shipping_information + self.supported_carriers = supported_carriers + self.alternate_locations = alternate_locations + + +class LocationsResponse(msrest.serialization.Model): + """Locations response. + + :param value: locations. + :type value: list[~storage_import_export.models.Location] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Location]'}, + } + + def __init__( + self, + *, + value: Optional[List["Location"]] = None, + **kwargs + ): + super(LocationsResponse, self).__init__(**kwargs) + self.value = value + + +class Operation(msrest.serialization.Model): + """Describes a supported operation by the Storage Import/Export job API. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. Name of the operation. + :type name: str + :param provider: The resource provider name to which the operation belongs. + :type provider: str + :param resource: The name of the resource to which the operation belongs. + :type resource: str + :param operation: The display name of the operation. + :type operation: str + :param description: Short description of the operation. 
+ :type description: str + """ + + _validation = { + 'name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'provider': {'key': 'display.provider', 'type': 'str'}, + 'resource': {'key': 'display.resource', 'type': 'str'}, + 'operation': {'key': 'display.operation', 'type': 'str'}, + 'description': {'key': 'display.description', 'type': 'str'}, + } + + def __init__( + self, + *, + name: str, + provider: Optional[str] = None, + resource: Optional[str] = None, + operation: Optional[str] = None, + description: Optional[str] = None, + **kwargs + ): + super(Operation, self).__init__(**kwargs) + self.name = name + self.provider = provider + self.resource = resource + self.operation = operation + self.description = description + + +class PackageInfomation(msrest.serialization.Model): + """Contains information about the package being shipped by the customer to the Microsoft data center. + + All required parameters must be populated in order to send to Azure. + + :param carrier_name: Required. The name of the carrier that is used to ship the import or + export drives. + :type carrier_name: str + :param tracking_number: Required. The tracking number of the package. + :type tracking_number: str + :param drive_count: Required. The number of drives included in the package. + :type drive_count: int + :param ship_date: Required. The date when the package is shipped. 
class PackageInfomation(msrest.serialization.Model):
    """Contains information about the package being shipped by the customer to the Microsoft data center.

    NOTE: the class name spelling ("Infomation") matches the generated API surface
    and must not be corrected here without regenerating callers.

    All required parameters must be populated in order to send to Azure.

    :param carrier_name: Required. The name of the carrier that is used to ship the import or
     export drives.
    :type carrier_name: str
    :param tracking_number: Required. The tracking number of the package.
    :type tracking_number: str
    :param drive_count: Required. The number of drives included in the package.
    :type drive_count: int
    :param ship_date: Required. The date when the package is shipped.
    :type ship_date: str
    """

    _validation = {
        'carrier_name': {'required': True},
        'tracking_number': {'required': True},
        'drive_count': {'required': True},
        'ship_date': {'required': True},
    }

    _attribute_map = {
        'carrier_name': {'key': 'carrierName', 'type': 'str'},
        'tracking_number': {'key': 'trackingNumber', 'type': 'str'},
        'drive_count': {'key': 'driveCount', 'type': 'int'},
        'ship_date': {'key': 'shipDate', 'type': 'str'},
    }

    def __init__(self, *, carrier_name: str, tracking_number: str, drive_count: int, ship_date: str, **kwargs):
        super().__init__(**kwargs)
        self.carrier_name = carrier_name
        self.tracking_number = tracking_number
        self.drive_count = drive_count
        self.ship_date = ship_date


class PutJobParameters(msrest.serialization.Model):
    """Put Job parameters.

    :param location: Specifies the supported Azure location where the job should be created.
    :type location: str
    :param tags: A set of tags. Specifies the tags that will be assigned to the job.
    :type tags: object
    :param properties: Specifies the job properties.
    :type properties: ~storage_import_export.models.JobDetails
    """

    _attribute_map = {
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': 'object'},
        'properties': {'key': 'properties', 'type': 'JobDetails'},
    }

    def __init__(
        self,
        *,
        location: Optional[str] = None,
        tags: Optional[object] = None,
        properties: Optional["JobDetails"] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.location = location
        self.tags = tags
        self.properties = properties
class ReturnAddress(msrest.serialization.Model):
    """Specifies the return address information for the job.

    All required parameters must be populated in order to send to Azure.

    :param recipient_name: Required. The name of the recipient who will receive the hard drives
     when they are returned.
    :type recipient_name: str
    :param street_address1: Required. The first line of the street address to use when returning
     the drives.
    :type street_address1: str
    :param street_address2: The second line of the street address to use when returning the drives.
    :type street_address2: str
    :param city: Required. The city name to use when returning the drives.
    :type city: str
    :param state_or_province: The state or province to use when returning the drives.
    :type state_or_province: str
    :param postal_code: Required. The postal code to use when returning the drives.
    :type postal_code: str
    :param country_or_region: Required. The country or region to use when returning the drives.
    :type country_or_region: str
    :param phone: Required. Phone number of the recipient of the returned drives.
    :type phone: str
    :param email: Required. Email address of the recipient of the returned drives.
    :type email: str
    """

    _validation = {
        'recipient_name': {'required': True},
        'street_address1': {'required': True},
        'city': {'required': True},
        'postal_code': {'required': True},
        'country_or_region': {'required': True},
        'phone': {'required': True},
        'email': {'required': True},
    }

    _attribute_map = {
        'recipient_name': {'key': 'recipientName', 'type': 'str'},
        'street_address1': {'key': 'streetAddress1', 'type': 'str'},
        'street_address2': {'key': 'streetAddress2', 'type': 'str'},
        'city': {'key': 'city', 'type': 'str'},
        'state_or_province': {'key': 'stateOrProvince', 'type': 'str'},
        'postal_code': {'key': 'postalCode', 'type': 'str'},
        'country_or_region': {'key': 'countryOrRegion', 'type': 'str'},
        'phone': {'key': 'phone', 'type': 'str'},
        'email': {'key': 'email', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        recipient_name: str,
        street_address1: str,
        city: str,
        postal_code: str,
        country_or_region: str,
        phone: str,
        email: str,
        street_address2: Optional[str] = None,
        state_or_province: Optional[str] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.recipient_name = recipient_name
        self.street_address1 = street_address1
        self.street_address2 = street_address2
        self.city = city
        self.state_or_province = state_or_province
        self.postal_code = postal_code
        self.country_or_region = country_or_region
        self.phone = phone
        self.email = email
class ReturnShipping(msrest.serialization.Model):
    """Specifies the return carrier and customer's account with the carrier.

    All required parameters must be populated in order to send to Azure.

    :param carrier_name: Required. The carrier's name.
    :type carrier_name: str
    :param carrier_account_number: Required. The customer's account number with the carrier.
    :type carrier_account_number: str
    """

    _validation = {
        'carrier_name': {'required': True},
        'carrier_account_number': {'required': True},
    }

    _attribute_map = {
        'carrier_name': {'key': 'carrierName', 'type': 'str'},
        'carrier_account_number': {'key': 'carrierAccountNumber', 'type': 'str'},
    }

    def __init__(self, *, carrier_name: str, carrier_account_number: str, **kwargs):
        super().__init__(**kwargs)
        self.carrier_name = carrier_name
        self.carrier_account_number = carrier_account_number
class ShippingInformation(msrest.serialization.Model):
    """Contains information about the Microsoft datacenter to which the drives should be shipped.

    Variables are only populated by the server, and will be ignored when sending a request.

    :param recipient_name: The name of the recipient who will receive the hard drives when they are
     returned.
    :type recipient_name: str
    :param street_address1: The first line of the street address to use when returning the drives.
    :type street_address1: str
    :param street_address2: The second line of the street address to use when returning the drives.
    :type street_address2: str
    :param city: The city name to use when returning the drives.
    :type city: str
    :param state_or_province: The state or province to use when returning the drives.
    :type state_or_province: str
    :param postal_code: The postal code to use when returning the drives.
    :type postal_code: str
    :param country_or_region: The country or region to use when returning the drives.
    :type country_or_region: str
    :param phone: Phone number of the recipient of the returned drives.
    :type phone: str
    :ivar additional_information: Additional shipping information for customer, specific to
     datacenter to which customer should send their disks.
    :vartype additional_information: str
    """

    _validation = {
        'additional_information': {'readonly': True},
    }

    _attribute_map = {
        'recipient_name': {'key': 'recipientName', 'type': 'str'},
        'street_address1': {'key': 'streetAddress1', 'type': 'str'},
        'street_address2': {'key': 'streetAddress2', 'type': 'str'},
        'city': {'key': 'city', 'type': 'str'},
        'state_or_province': {'key': 'stateOrProvince', 'type': 'str'},
        'postal_code': {'key': 'postalCode', 'type': 'str'},
        'country_or_region': {'key': 'countryOrRegion', 'type': 'str'},
        'phone': {'key': 'phone', 'type': 'str'},
        'additional_information': {'key': 'additionalInformation', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        recipient_name: Optional[str] = None,
        street_address1: Optional[str] = None,
        street_address2: Optional[str] = None,
        city: Optional[str] = None,
        state_or_province: Optional[str] = None,
        postal_code: Optional[str] = None,
        country_or_region: Optional[str] = None,
        phone: Optional[str] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.recipient_name = recipient_name
        self.street_address1 = street_address1
        self.street_address2 = street_address2
        self.city = city
        self.state_or_province = state_or_province
        self.postal_code = postal_code
        self.country_or_region = country_or_region
        self.phone = phone
        # Read-only on the wire: always initialized to None locally and
        # populated only by the service on responses.
        self.additional_information = None
class SystemData(msrest.serialization.Model):
    """Metadata pertaining to creation and last modification of the resource.

    :param created_by: The identity that created the resource.
    :type created_by: str
    :param created_by_type: The type of identity that created the resource. Possible values
     include: "User", "Application", "ManagedIdentity", "Key".
    :type created_by_type: str or ~storage_import_export.models.CreatedByType
    :param created_at: The timestamp of resource creation (UTC).
    :type created_at: ~datetime.datetime
    :param last_modified_by: The identity that last modified the resource.
    :type last_modified_by: str
    :param last_modified_by_type: The type of identity that last modified the resource. Possible
     values include: "User", "Application", "ManagedIdentity", "Key".
    :type last_modified_by_type: str or ~storage_import_export.models.CreatedByType
    :param last_modified_at: The timestamp of resource last modification (UTC).
    :type last_modified_at: ~datetime.datetime
    """

    _attribute_map = {
        'created_by': {'key': 'createdBy', 'type': 'str'},
        'created_by_type': {'key': 'createdByType', 'type': 'str'},
        'created_at': {'key': 'createdAt', 'type': 'iso-8601'},
        'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'},
        'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'},
        'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        *,
        created_by: Optional[str] = None,
        created_by_type: Optional[Union[str, "CreatedByType"]] = None,
        created_at: Optional[datetime.datetime] = None,
        last_modified_by: Optional[str] = None,
        last_modified_by_type: Optional[Union[str, "CreatedByType"]] = None,
        last_modified_at: Optional[datetime.datetime] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.created_by = created_by
        self.created_by_type = created_by_type
        self.created_at = created_at
        self.last_modified_by = last_modified_by
        self.last_modified_by_type = last_modified_by_type
        self.last_modified_at = last_modified_at
class UpdateJobParameters(msrest.serialization.Model):
    """Update Job parameters.

    :param tags: A set of tags. Specifies the tags that will be assigned to the job.
    :type tags: object
    :param cancel_requested: If specified, the value must be true. The service will attempt to
     cancel the job.
    :type cancel_requested: bool
    :param state: If specified, the value must be Shipping, which tells the Import/Export service
     that the package for the job has been shipped. The ReturnAddress and DeliveryPackage properties
     must have been set either in this request or in a previous request, otherwise the request will
     fail.
    :type state: str
    :param return_address: Specifies the return address information for the job.
    :type return_address: ~storage_import_export.models.ReturnAddress
    :param return_shipping: Specifies the return carrier and customer's account with the carrier.
    :type return_shipping: ~storage_import_export.models.ReturnShipping
    :param delivery_package: Contains information about the package being shipped by the customer
     to the Microsoft data center.
    :type delivery_package: ~storage_import_export.models.DeliveryPackageInformation
    :param log_level: Indicates whether error logging or verbose logging is enabled.
    :type log_level: str
    :param backup_drive_manifest: Indicates whether the manifest files on the drives should be
     copied to block blobs.
    :type backup_drive_manifest: bool
    :param drive_list: List of drives that comprise the job.
    :type drive_list: list[~storage_import_export.models.DriveStatus]
    """

    # All job-level fields except 'tags' are nested under 'properties' on the wire.
    _attribute_map = {
        'tags': {'key': 'tags', 'type': 'object'},
        'cancel_requested': {'key': 'properties.cancelRequested', 'type': 'bool'},
        'state': {'key': 'properties.state', 'type': 'str'},
        'return_address': {'key': 'properties.returnAddress', 'type': 'ReturnAddress'},
        'return_shipping': {'key': 'properties.returnShipping', 'type': 'ReturnShipping'},
        'delivery_package': {'key': 'properties.deliveryPackage', 'type': 'DeliveryPackageInformation'},
        'log_level': {'key': 'properties.logLevel', 'type': 'str'},
        'backup_drive_manifest': {'key': 'properties.backupDriveManifest', 'type': 'bool'},
        'drive_list': {'key': 'properties.driveList', 'type': '[DriveStatus]'},
    }

    def __init__(
        self,
        *,
        tags: Optional[object] = None,
        cancel_requested: Optional[bool] = None,
        state: Optional[str] = None,
        return_address: Optional["ReturnAddress"] = None,
        return_shipping: Optional["ReturnShipping"] = None,
        delivery_package: Optional["DeliveryPackageInformation"] = None,
        log_level: Optional[str] = None,
        backup_drive_manifest: Optional[bool] = None,
        drive_list: Optional[List["DriveStatus"]] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.tags = tags
        self.cancel_requested = cancel_requested
        self.state = state
        self.return_address = return_address
        self.return_shipping = return_shipping
        self.delivery_package = delivery_package
        self.log_level = log_level
        self.backup_drive_manifest = backup_drive_manifest
        self.drive_list = drive_list
class _CaseInsensitiveEnumMeta(EnumMeta):
    """Enum metaclass that makes member lookup by name case-insensitive.

    Both ``Enum['name']`` and ``Enum.name`` attribute access fold the name to
    upper case before resolving it against the member map.
    """

    def __getitem__(cls, name):
        return super().__getitem__(name.upper())

    def __getattr__(cls, name):
        """Return the enum member matching `name`.

        We use __getattr__ instead of descriptors or inserting into the enum
        class' __dict__ in order to support `name` and `value` being both
        properties for enum members (which live in the class' __dict__) and
        enum members themselves.
        """
        try:
            return cls._member_map_[name.upper()]
        except KeyError:
            raise AttributeError(name)


# NOTE: the original generated code used six.with_metaclass for Python 2
# compatibility; this package is Python-3-only (keyword-only arguments and
# annotations are used throughout), so the native metaclass= keyword is used
# instead and the dead `six` dependency is dropped.

class CreatedByType(str, Enum, metaclass=_CaseInsensitiveEnumMeta):
    """The type of identity that created the resource.
    """

    USER = "User"
    APPLICATION = "Application"
    MANAGED_IDENTITY = "ManagedIdentity"
    KEY = "Key"


class DriveState(str, Enum, metaclass=_CaseInsensitiveEnumMeta):
    """The drive's current state.
    """

    SPECIFIED = "Specified"
    RECEIVED = "Received"
    NEVER_RECEIVED = "NeverReceived"
    TRANSFERRING = "Transferring"
    COMPLETED = "Completed"
    COMPLETED_MORE_INFO = "CompletedMoreInfo"
    SHIPPED_BACK = "ShippedBack"


class EncryptionKekType(str, Enum, metaclass=_CaseInsensitiveEnumMeta):
    """The type of kek encryption key
    """

    MICROSOFT_MANAGED = "MicrosoftManaged"
    CUSTOMER_MANAGED = "CustomerManaged"


class IdentityType(str, Enum, metaclass=_CaseInsensitiveEnumMeta):
    """The type of identity
    """

    NONE = "None"
    SYSTEM_ASSIGNED = "SystemAssigned"
    USER_ASSIGNED = "UserAssigned"
class BitLockerKeysOperations(object):
    """BitLockerKeysOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~storage_import_export.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = models

    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def list(
        self,
        job_name,  # type: str
        resource_group_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["models.GetBitLockerKeysResponse"]
        """Returns the BitLocker Keys for all drives in the specified job.

        :param job_name: The name of the import/export job.
        :type job_name: str
        :param resource_group_name: The resource group name uniquely identifies the resource group
         within the user subscription.
        :type resource_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either GetBitLockerKeysResponse or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~storage_import_export.models.GetBitLockerKeysResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.GetBitLockerKeysResponse"]
        # Map well-known status codes to typed azure-core exceptions; the caller
        # may extend/override the mapping via the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2016-11-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build either the initial request (POST to the listBitLockerKeys
            # action URL) or a continuation request (GET on the opaque next_link).
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            if self._config.accept_language is not None:
                header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str')
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'jobName': self._serialize.url("job_name", job_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                # NOTE: this is a POST "action" endpoint, unlike the GET list operations.
                request = self._client.post(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            deserialized = self._deserialize('GetBitLockerKeysResponse', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            # Always returns None as the continuation token: this API does not
            # page, so iteration stops after the first response.
            return None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                # Deserialize the error body first so it can be attached to the
                # raised HttpResponseError as its model.
                error = self._deserialize(models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ImportExport/jobs/{jobName}/listBitLockerKeys'}  # type: ignore
    def list_by_subscription(
        self,
        top=None,  # type: Optional[int]
        filter=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["models.ListJobsResponse"]
        """Returns all active and completed jobs in a subscription.

        :param top: An integer value that specifies how many jobs at most should be returned. The value
         cannot exceed 100.
        :type top: int
        :param filter: Can be used to restrict the results to certain conditions.
        :type filter: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ListJobsResponse or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~storage_import_export.models.ListJobsResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.ListJobsResponse"]
        # Map well-known status codes to typed azure-core exceptions; the caller
        # may extend/override the mapping via the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2016-11-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build either the initial page request (service URL + query params)
            # or a continuation request on the opaque next_link.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            if self._config.accept_language is not None:
                header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str')
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list_by_subscription.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                if top is not None:
                    query_parameters['$top'] = self._serialize.query("top", top, 'int')
                if filter is not None:
                    query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link already embeds the query string, so no parameters are re-added.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            deserialized = self._deserialize('ListJobsResponse', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            # The (next_link, iterator) pair drives ItemPaged's continuation.
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize(models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.ImportExport/jobs'}  # type: ignore
    def list_by_resource_group(
        self,
        resource_group_name,  # type: str
        top=None,  # type: Optional[int]
        filter=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["models.ListJobsResponse"]
        """Returns all active and completed jobs in a resource group.

        :param resource_group_name: The resource group name uniquely identifies the resource group
         within the user subscription.
        :type resource_group_name: str
        :param top: An integer value that specifies how many jobs at most should be returned. The value
         cannot exceed 100.
        :type top: int
        :param filter: Can be used to restrict the results to certain conditions.
        :type filter: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ListJobsResponse or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~storage_import_export.models.ListJobsResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.ListJobsResponse"]
        # Same paging/error-handling shape as list_by_subscription, scoped to a resource group.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2016-11-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            if self._config.accept_language is not None:
                header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str')
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list_by_resource_group.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                if top is not None:
                    query_parameters['$top'] = self._serialize.query("top", top, 'int')
                if filter is not None:
                    query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link already embeds the query string, so no parameters are re-added.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            deserialized = self._deserialize('ListJobsResponse', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            # The (next_link, iterator) pair drives ItemPaged's continuation.
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize(models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ImportExport/jobs'}  # type: ignore
            return request

        def extract_data(pipeline_response):
            deserialized = self._deserialize('ListJobsResponse', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            # Paged list: hand back the continuation link (None ends paging) plus this page's items.
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                # NOTE(review): the error body is deserialized *before* map_error here; for mapped
                # codes (401/404/409) map_error raises first and `error` goes unused. The non-paging
                # methods below call map_error first — generated code, but confirm intended.
                error = self._deserialize(models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ImportExport/jobs'} # type: ignore

    def get(
        self,
        job_name, # type: str
        resource_group_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> "models.JobResponse"
        """Gets information about an existing job.

        :param job_name: The name of the import/export job.
        :type job_name: str
        :param resource_group_name: The resource group name uniquely identifies the resource group
         within the user subscription.
        :type resource_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: JobResponse, or the result of cls(response)
        :rtype: ~storage_import_export.models.JobResponse
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["models.JobResponse"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2016-11-01"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url'] # type: ignore
        path_format_arguments = {
            'jobName': self._serialize.url("job_name", job_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        if self._config.accept_language is not None:
            header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('JobResponse', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ImportExport/jobs/{jobName}'} # type: ignore

    def update(
        self,
        job_name, # type: str
        resource_group_name, # type: str
        body, # type: "models.UpdateJobParameters"
        **kwargs # type: Any
    ):
        # type: (...) -> "models.JobResponse"
        """Updates specific properties of a job. You can call this operation to notify the Import/Export
        service that the hard drives comprising the import or export job have been shipped to the
        Microsoft data center. It can also be used to cancel an existing job.

        :param job_name: The name of the import/export job.
        :type job_name: str
        :param resource_group_name: The resource group name uniquely identifies the resource group
         within the user subscription.
        :type resource_group_name: str
        :param body: The parameters to update in the job.
        :type body: ~storage_import_export.models.UpdateJobParameters
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: JobResponse, or the result of cls(response)
        :rtype: ~storage_import_export.models.JobResponse
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["models.JobResponse"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2016-11-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.update.metadata['url'] # type: ignore
        path_format_arguments = {
            'jobName': self._serialize.url("job_name", job_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        if self._config.accept_language is not None:
            header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str')
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # PATCH with the serialized UpdateJobParameters body.
        body_content_kwargs = {} # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'UpdateJobParameters')
        body_content_kwargs['content'] = body_content
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('JobResponse', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ImportExport/jobs/{jobName}'} # type: ignore

    def create(
        self,
        job_name, # type: str
        resource_group_name, # type: str
        body, # type: "models.PutJobParameters"
        client_tenant_id=None, # type: Optional[str]
        **kwargs # type: Any
    ):
        # type: (...) -> "models.JobResponse"
        """Creates a new job or updates an existing job in the specified subscription.

        :param job_name: The name of the import/export job.
        :type job_name: str
        :param resource_group_name: The resource group name uniquely identifies the resource group
         within the user subscription.
        :type resource_group_name: str
        :param body: The parameters used for creating the job.
        :type body: ~storage_import_export.models.PutJobParameters
        :param client_tenant_id: The tenant ID of the client making the request.
        :type client_tenant_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: JobResponse, or the result of cls(response)
        :rtype: ~storage_import_export.models.JobResponse
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["models.JobResponse"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2016-11-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.create.metadata['url'] # type: ignore
        path_format_arguments = {
            'jobName': self._serialize.url("job_name", job_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        if self._config.accept_language is not None:
            header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str')
        if client_tenant_id is not None:
            # Optional tenant hint travels as a custom header, not in the body.
            header_parameters['x-ms-client-tenant-id'] = self._serialize.header("client_tenant_id", client_tenant_id, 'str')
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {} # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'PutJobParameters')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        # PUT is create-or-update: 200 and 201 both carry the same JobResponse payload.
        if response.status_code == 200:
            deserialized = self._deserialize('JobResponse', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('JobResponse', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ImportExport/jobs/{jobName}'} # type: ignore

    def delete(
        self,
        job_name, # type: str
        resource_group_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> None
        """Deletes an existing job. Only jobs in the Creating or Completed states can be deleted.

        :param job_name: The name of the import/export job.
        :type job_name: str
        :param resource_group_name: The resource group name uniquely identifies the resource group
         within the user subscription.
        :type resource_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2016-11-01"
        accept = "application/json"

        # Construct URL
        url = self.delete.metadata['url'] # type: ignore
        path_format_arguments = {
            'jobName': self._serialize.url("job_name", job_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        if self._config.accept_language is not None:
            header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # Only 200 is accepted; the service returns no body on success.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    delete.metadata =
{'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ImportExport/jobs/{jobName}'} # type: ignore
diff --git a/src/storageimportexport/azext_storageimportexport/vendored_sdks/storageimportexport/operations/_locations_operations.py b/src/storageimportexport/azext_storageimportexport/vendored_sdks/storageimportexport/operations/_locations_operations.py
new file mode 100644
index 00000000000..c820040af3b
--- /dev/null
+++ b/src/storageimportexport/azext_storageimportexport/vendored_sdks/storageimportexport/operations/_locations_operations.py
@@ -0,0 +1,171 @@
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings

from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat

from .. import models

if TYPE_CHECKING:
    # pylint: disable=unused-import,ungrouped-imports
    from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar

    T = TypeVar('T')
    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]

class LocationsOperations(object):
    """LocationsOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~storage_import_export.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = models

    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def list(
        self,
        **kwargs # type: Any
    ):
        # type: (...) -> Iterable["models.LocationsResponse"]
        """Returns a list of locations to which you can ship the disks associated with an import or export
        job. A location is a Microsoft data center region.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either LocationsResponse or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~storage_import_export.models.LocationsResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["models.LocationsResponse"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2016-11-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {} # type: Dict[str, Any]
            if self._config.accept_language is not None:
                header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str')
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url'] # type: ignore
                # Construct parameters
                query_parameters = {} # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {} # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            deserialized = self._deserialize('LocationsResponse', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            # Single-page list: the continuation link is always None, so paging stops
            # after the first response.
            return None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize(models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    # Provider-level endpoint: no subscription or resource group in the path.
    list.metadata = {'url': '/providers/Microsoft.ImportExport/locations'} # type: ignore

    def get(
        self,
        location_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> "models.Location"
        """Returns the details about a location to which you can ship the disks associated with an import
        or export job. A location is an Azure region.

        :param location_name: The name of the location. For example, West US or westus.
        :type location_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: Location, or the result of cls(response)
        :rtype: ~storage_import_export.models.Location
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["models.Location"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2016-11-01"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url'] # type: ignore
        path_format_arguments = {
            'locationName': self._serialize.url("location_name", location_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        if self._config.accept_language is not None:
            header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('Location', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url':
'/providers/Microsoft.ImportExport/locations/{locationName}'} # type: ignore
diff --git a/src/storageimportexport/azext_storageimportexport/vendored_sdks/storageimportexport/operations/_operations.py b/src/storageimportexport/azext_storageimportexport/vendored_sdks/storageimportexport/operations/_operations.py
new file mode 100644
index 00000000000..d66f0c6bd20
--- /dev/null
+++ b/src/storageimportexport/azext_storageimportexport/vendored_sdks/storageimportexport/operations/_operations.py
@@ -0,0 +1,112 @@
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings

from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat

from .. import models

if TYPE_CHECKING:
    # pylint: disable=unused-import,ungrouped-imports
    from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar

    T = TypeVar('T')
    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]

class Operations(object):
    """Operations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~storage_import_export.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = models

    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def list(
        self,
        **kwargs # type: Any
    ):
        # type: (...) -> Iterable["models.ListOperationsResponse"]
        """Returns the list of operations supported by the import/export resource provider.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ListOperationsResponse or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~storage_import_export.models.ListOperationsResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["models.ListOperationsResponse"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2016-11-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {} # type: Dict[str, Any]
            if self._config.accept_language is not None:
                header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str')
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url'] # type: ignore
                # Construct parameters
                query_parameters = {} # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {} # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            deserialized = self._deserialize('ListOperationsResponse', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            # Single-page list: no continuation link is ever returned.
            return None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize(models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    # Tenant-agnostic provider endpoint.
    list.metadata = {'url': '/providers/Microsoft.ImportExport/operations'} # type: ignore
diff --git a/src/storageimportexport/azext_storageimportexport/vendored_sdks/storageimportexport/py.typed b/src/storageimportexport/azext_storageimportexport/vendored_sdks/storageimportexport/py.typed
new file mode 100644
index 00000000000..e5aff4f83af
--- /dev/null
+++ b/src/storageimportexport/azext_storageimportexport/vendored_sdks/storageimportexport/py.typed
@@ -0,0 +1 @@
# Marker file for PEP 561.
\ No newline at end of file diff --git a/src/storageimportexport/report.md b/src/storageimportexport/report.md new file mode 100644 index 00000000000..1aa001959e2 --- /dev/null +++ b/src/storageimportexport/report.md @@ -0,0 +1,204 @@ +# Azure CLI Module Creation Report + +## EXTENSION +|CLI Extension|Command Groups| +|---------|------------| +|az storageimportexport|[groups](#CommandGroups) + +## GROUPS +### Command groups in `az storageimportexport` extension +|CLI Command Group|Group Swagger name|Commands| +|---------|------------|--------| +|az storageimportexport location|Locations|[commands](#CommandsInLocations)| +|az storageimportexport job|Jobs|[commands](#CommandsInJobs)| +|az storageimportexport bit-locker-key|BitLockerKeys|[commands](#CommandsInBitLockerKeys)| + +## COMMANDS +### Commands in `az storageimportexport bit-locker-key` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az storageimportexport bit-locker-key list](#BitLockerKeysList)|List|[Parameters](#ParametersBitLockerKeysList)|[Example](#ExamplesBitLockerKeysList)| + +### Commands in `az storageimportexport job` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az storageimportexport job list](#JobsListByResourceGroup)|ListByResourceGroup|[Parameters](#ParametersJobsListByResourceGroup)|[Example](#ExamplesJobsListByResourceGroup)| +|[az storageimportexport job list](#JobsListBySubscription)|ListBySubscription|[Parameters](#ParametersJobsListBySubscription)|[Example](#ExamplesJobsListBySubscription)| +|[az storageimportexport job show](#JobsGet)|Get|[Parameters](#ParametersJobsGet)|[Example](#ExamplesJobsGet)| +|[az storageimportexport job create](#JobsCreate)|Create|[Parameters](#ParametersJobsCreate)|[Example](#ExamplesJobsCreate)| +|[az storageimportexport job update](#JobsUpdate)|Update|[Parameters](#ParametersJobsUpdate)|[Example](#ExamplesJobsUpdate)| +|[az 
storageimportexport job delete](#JobsDelete)|Delete|[Parameters](#ParametersJobsDelete)|[Example](#ExamplesJobsDelete)| + +### Commands in `az storageimportexport location` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az storageimportexport location list](#LocationsList)|List|[Parameters](#ParametersLocationsList)|[Example](#ExamplesLocationsList)| +|[az storageimportexport location show](#LocationsGet)|Get|[Parameters](#ParametersLocationsGet)|[Example](#ExamplesLocationsGet)| + + +## COMMAND DETAILS + +### group `az storageimportexport bit-locker-key` +#### Command `az storageimportexport bit-locker-key list` + +##### Example +``` +az storageimportexport bit-locker-key list --job-name "myJob" --resource-group "myResourceGroup" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--job-name**|string|The name of the import/export job.|job_name|jobName| +|**--resource-group-name**|string|The resource group name uniquely identifies the resource group within the user subscription.|resource_group_name|resourceGroupName| + +### group `az storageimportexport job` +#### Command `az storageimportexport job list` + +##### Example +``` +az storageimportexport job list --resource-group "myResourceGroup" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The resource group name uniquely identifies the resource group within the user subscription.|resource_group_name|resourceGroupName| +|**--top**|integer|An integer value that specifies how many jobs at most should be returned. 
The value cannot exceed 100.|top|$top| +|**--filter**|string|Can be used to restrict the results to certain conditions.|filter|$filter| + +#### Command `az storageimportexport job list` + +##### Example +``` +az storageimportexport job list +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +#### Command `az storageimportexport job show` + +##### Example +``` +az storageimportexport job show --name "myJob" --resource-group "myResourceGroup" +``` +##### Example +``` +az storageimportexport job show --name "myJob" --resource-group "myResourceGroup" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--job-name**|string|The name of the import/export job.|job_name|jobName| +|**--resource-group-name**|string|The resource group name uniquely identifies the resource group within the user subscription.|resource_group_name|resourceGroupName| + +#### Command `az storageimportexport job create` + +##### Example +``` +az storageimportexport job create --location "West US" --backup-drive-manifest true --diagnostics-path \ +"waimportexport" --export blob-path-prefix="/" --job-type "Export" --log-level "Verbose" --return-address \ +city="Redmond" country-or-region="USA" email="Test@contoso.com" phone="4250000000" postal-code="98007" \ +recipient-name="Test" state-or-province="wa" street-address1="Street1" street-address2="street2" --return-shipping \ +carrier-account-number="989ffff" carrier-name="FedEx" --storage-account-id "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxx\ +xxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.ClassicStorage/storageAccounts/test" --name "myExportJob" \ +--resource-group "myResourceGroup" +``` +##### Example +``` +az storageimportexport job create --location "West US" --backup-drive-manifest true --diagnostics-path \ +"waimportexport" --drive-list 
bit-locker-key="238810-662376-448998-450120-652806-203390-606320-483076" \ +drive-header-hash="0:1048576:FB6B6ED500D49DA6E0D723C98D42C657F2881CC13357C28DCECA6A524F1292501571A321238540E621AB5BD9C9\ +A32637615919A75593E6CB5C1515DAE341CABF;135266304:143360:C957A189AFC38C4E80731252301EB91427CE55E61448FA3C73C6FDDE70ABBC1\ +97947EC8D0249A2C639BB10B95957D5820A4BE8DFBBF76FFFA688AE5CE0D42EC3" drive-id="9CA995BB" manifest-file="\\\\8a0c23f7-14b7\ +-470a-9633-fcd46590a1bc.manifest" manifest-hash="4228EC5D8E048CB9B515338C789314BE8D0B2FDBC7C7A0308E1C826242CDE74E" \ +--job-type "Import" --log-level "Verbose" --return-address city="Redmond" country-or-region="USA" \ +email="Test@contoso.com" phone="4250000000" postal-code="98007" recipient-name="Test" state-or-province="wa" \ +street-address1="Street1" street-address2="street2" --return-shipping carrier-account-number="989ffff" \ +carrier-name="FedEx" --storage-account-id "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourc\ +eGroup/providers/Microsoft.ClassicStorage/storageAccounts/test" --name "myJob" --resource-group "myResourceGroup" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--job-name**|string|The name of the import/export job.|job_name|jobName| +|**--resource-group-name**|string|The resource group name uniquely identifies the resource group within the user subscription.|resource_group_name|resourceGroupName| +|**--client-tenant-id**|string|The tenant ID of the client making the request.|client_tenant_id|ClientTenantId| +|**--location**|string|Specifies the supported Azure location where the job should be created|location|location| +|**--tags**|any|Specifies the tags that will be assigned to the job.|tags|tags| +|**--storage-account-id**|string|The resource identifier of the storage account where data will be imported to or exported from.|storage_account_id|storageAccountId| +|**--job-type**|string|The type of 
job|job_type|jobType| +|**--return-address**|object|Specifies the return address information for the job. |return_address|returnAddress| +|**--return-shipping**|object|Specifies the return carrier and customer's account with the carrier. |return_shipping|returnShipping| +|**--shipping-information**|object|Contains information about the Microsoft datacenter to which the drives should be shipped. |shipping_information|shippingInformation| +|**--delivery-package**|object|Contains information about the package being shipped by the customer to the Microsoft data center. |delivery_package|deliveryPackage| +|**--return-package**|object|Contains information about the package being shipped from the Microsoft data center to the customer to return the drives. The format is the same as the deliveryPackage property above. This property is not included if the drives have not yet been returned. |return_package|returnPackage| +|**--diagnostics-path**|string|The virtual blob directory to which the copy logs and backups of drive manifest files (if enabled) will be stored.|diagnostics_path|diagnosticsPath| +|**--log-level**|string|Default value is Error. Indicates whether error logging or verbose logging will be enabled.|log_level|logLevel| +|**--backup-drive-manifest**|boolean|Default value is false. Indicates whether the manifest files on the drives should be copied to block blobs.|backup_drive_manifest|backupDriveManifest| +|**--state**|string|Current state of the job.|state|state| +|**--cancel-requested**|boolean|Indicates whether a request has been submitted to cancel the job.|cancel_requested|cancelRequested| +|**--percent-complete**|integer|Overall percentage completed for the job.|percent_complete|percentComplete| +|**--incomplete-blob-list-uri**|string|A blob path that points to a block blob containing a list of blob names that were not exported due to insufficient drive space. 
If all blobs were exported successfully, then this element is not included in the response.|incomplete_blob_list_uri|incompleteBlobListUri| +|**--drive-list**|array|List of up to ten drives that comprise the job. The drive list is a required element for an import job; it is not specified for export jobs.|drive_list|driveList| +|**--export**|object|A property containing information about the blobs to be exported for an export job. This property is included for export jobs only.|export|export| +|**--provisioning-state**|string|Specifies the provisioning state of the job.|provisioning_state|provisioningState| +|**--encryption-key**|object|Contains information about the encryption key.|encryption_key|encryptionKey| + +#### Command `az storageimportexport job update` + +##### Example +``` +az storageimportexport job update --backup-drive-manifest true --log-level "Verbose" --state "" --name "myExportJob" \ +--resource-group "myResourceGroup" +``` +##### Example +``` +az storageimportexport job update --backup-drive-manifest true --log-level "Verbose" --state "" --name "myJob" \ +--resource-group "myResourceGroup" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--job-name**|string|The name of the import/export job.|job_name|jobName| +|**--resource-group-name**|string|The resource group name uniquely identifies the resource group within the user subscription.|resource_group_name|resourceGroupName| +|**--tags**|any|Specifies the tags that will be assigned to the job|tags|tags| +|**--cancel-requested**|boolean|If specified, the value must be true. The service will attempt to cancel the job. |cancel_requested|cancelRequested| +|**--state**|string|If specified, the value must be Shipping, which tells the Import/Export service that the package for the job has been shipped. 
The ReturnAddress and DeliveryPackage properties must have been set either in this request or in a previous request, otherwise the request will fail. |state|state| +|**--return-address**|object|Specifies the return address information for the job.|return_address|returnAddress| +|**--return-shipping**|object|Specifies the return carrier and customer's account with the carrier.|return_shipping|returnShipping| +|**--delivery-package**|object|Contains information about the package being shipped by the customer to the Microsoft data center.|delivery_package|deliveryPackage| +|**--log-level**|string|Indicates whether error logging or verbose logging is enabled.|log_level|logLevel| +|**--backup-drive-manifest**|boolean|Indicates whether the manifest files on the drives should be copied to block blobs.|backup_drive_manifest|backupDriveManifest| +|**--drive-list**|array|List of drives that comprise the job.|drive_list|driveList| + +#### Command `az storageimportexport job delete` + +##### Example +``` +az storageimportexport job delete --name "myJob" --resource-group "myResourceGroup" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--job-name**|string|The name of the import/export job.|job_name|jobName| +|**--resource-group-name**|string|The resource group name uniquely identifies the resource group within the user subscription.|resource_group_name|resourceGroupName| + +### group `az storageimportexport location` +#### Command `az storageimportexport location list` + +##### Example +``` +az storageimportexport location list +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +#### Command `az storageimportexport location show` + +##### Example +``` +az storageimportexport location show --name "West US" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| 
+|------|----|-----------|----------|------------|
+|**--location-name**|string|The name of the location. For example, West US or westus.|location_name|locationName| diff --git a/src/storageimportexport/setup.cfg b/src/storageimportexport/setup.cfg new file mode 100644 index 00000000000..2fdd96e5d39 --- /dev/null +++ b/src/storageimportexport/setup.cfg @@ -0,0 +1 @@ +#setup.cfg \ No newline at end of file diff --git a/src/storageimportexport/setup.py b/src/storageimportexport/setup.py new file mode 100644 index 00000000000..ee7d0212f69 --- /dev/null +++ b/src/storageimportexport/setup.py @@ -0,0 +1,58 @@ +#!/usr/bin/env python

+# --------------------------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------------------------


+from codecs import open
+from setuptools import setup, find_packages

+# TODO: Confirm this version number matches the latest HISTORY.rst entry.
+VERSION = '0.1.0' +try: + from azext_storageimportexport.manual.version import VERSION +except ImportError: + pass + +# The full list of classifiers is available at +# https://pypi.python.org/pypi?%3Aaction=list_classifiers +CLASSIFIERS = [ + 'Development Status :: 4 - Beta', + 'Intended Audience :: Developers', + 'Intended Audience :: System Administrators', + 'Programming Language :: Python', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'License :: OSI Approved :: MIT License', +] + +DEPENDENCIES = [] + +try: + from azext_storageimportexport.manual.dependency import DEPENDENCIES +except ImportError: + pass + +with open('README.md', 'r', encoding='utf-8') as f: + README = f.read() +with open('HISTORY.rst', 'r', encoding='utf-8') as f: + HISTORY = f.read() + +setup( + name='storageimportexport', + version=VERSION, + description='Microsoft Azure Command-Line Tools StorageImportExport Extension', + author='Microsoft Corporation', + author_email='azpycli@microsoft.com', + url='https://github.com/Azure/azure-cli-extensions/tree/master/src/storageimportexport', + long_description=README + '\n\n' + HISTORY, + license='MIT', + classifiers=CLASSIFIERS, + packages=find_packages(), + install_requires=DEPENDENCIES, + package_data={'azext_storageimportexport': ['azext_metadata.json']}, +)